----------------------------------

GCC-2.6.1 COMES TO FREEBSD-current
----------------------------------
Everybody needs to 'make world'.

Oakland, Nov 2nd 1994.  In a surprise move this sunny afternoon, the release-
engineer for the slightly delayed FreeBSD-2.0, Poul-Henning Kamp (28),
decided to pull in the new version 2.6.1 of the GNU C-compiler.
The new version of the compiler was released today at noon, and hardly 9
hours later it was committed into the FreeBSD-current source-repository.
"It is simply because we have had too much trouble with the version 2.6.0
of the compiler" Poul-Henning told the FreeBSD-Gazette, "we took a gamble
when we decided to use that as our compiler for the 2.0 release, but it
seems to pay off in the end now" he concludes.
The move has not been discussed on the "core" list at all, and will come as
a surprise for most of Poul-Henning's peers.  "I have only discussed it with
Jordan [J. K. Hubbard, the FreeBSD's resident humourist], and we agreed that
we needed to do it, so ... I did it!".  After a breath he added with a grin:
"My email will probably get an all time 'disk-full' now!".
This will bring quite a flag-day to the FreeBSD developers, the patch-file
is almost 1.4 Megabyte, and they will have to run "make world" to get
entirely -current again.  "Too bad, but we just had to do this."  Was
the only comment from Poul-Henning to these problems.
When asked how this move would impact the 2.0 release-date, Poul-Henning's
face grew dark, he mumbled some very Danish words while he moved his fingers
in strange geometrical patterns.  Immediately something eclipsed the Sun, a
minor tremor shook the buildings, and the temperature fell significantly.
We decided not to pursue the question.

-----------
JOB-SECTION
-----------
Are you a dedicated GCC-hacker ?
We BADLY need somebody to look at the 'freebsd' OS in gcc, sanitize it and
carry the patches back to the GNU people.  In particular, we need to get
out of the "i386-only" spot we are in now.  I have the stuff to take a
gnu-dist into bmake-form, and will do that part.

Please apply to phk@freebsd.org

No Novice Need Apply.
This commit is contained in:
phk 1994-11-03 06:52:42 +00:00
parent b7837b4b50
commit 763106b915
95 changed files with 17504 additions and 11186 deletions

View File

@ -0,0 +1,178 @@
diff -u -r /freebsd/gcc-2.6.1/cccp.c ./cccp.c
--- /freebsd/gcc-2.6.1/cccp.c Tue Oct 25 15:37:44 1994
+++ ./cccp.c Wed Nov 2 17:36:25 1994
@@ -188,7 +188,7 @@
#ifndef VMS
#ifndef HAVE_STRERROR
extern int sys_nerr;
-#if defined(bsd4_4) || defined(__NetBSD__)
+#if defined(bsd4_4) || defined(__NetBSD__) || defined(__FreeBSD__)
extern const char *const sys_errlist[];
#else
extern char *sys_errlist[];
Only in .: cccp.c.orig
Only in .: cccp.o
Only in .: cexp.o
diff -u -r /freebsd/gcc-2.6.1/collect2.c ./collect2.c
--- /freebsd/gcc-2.6.1/collect2.c Thu Oct 20 15:05:46 1994
+++ ./collect2.c Wed Nov 2 17:36:25 1994
@@ -41,7 +41,7 @@
extern int errno;
#endif
-#if defined(bsd4_4) || defined(__NetBSD__)
+#if defined(bsd4_4) || defined(__NetBSD__) || defined(__FreeBSD__)
extern const char *const sys_errlist[];
#else
extern char *sys_errlist[];
Only in .: collect2.c.orig
diff -u -r /freebsd/gcc-2.6.1/config/i386/freebsd.h ./config/i386/freebsd.h
--- /freebsd/gcc-2.6.1/config/i386/freebsd.h Tue Oct 18 17:59:52 1994
+++ ./config/i386/freebsd.h Wed Nov 2 17:36:37 1994
@@ -33,11 +33,13 @@
#undef CPP_PREDEFINES
#define CPP_PREDEFINES "-Dunix -Di386 -D__FreeBSD__ -D__386BSD__ -Asystem(unix) -Asystem(FreeBSD) -Acpu(i386) -Amachine(i386)"
+#if 0
#define INCLUDE_DEFAULTS { \
{ "/usr/include", 0 }, \
{ "/usr/include/g++", 1 }, \
{ 0, 0} \
}
+#endif
/* Like the default, except no -lg. */
#define LIB_SPEC "%{!p:%{!pg:-lc}}%{p:-lc_p}%{pg:-lc_p}"
Only in ./config/i386: freebsd.h.orig
diff -u -r /freebsd/gcc-2.6.1/config/i386/x-freebsd ./config/i386/x-freebsd
--- /freebsd/gcc-2.6.1/config/i386/x-freebsd Mon Oct 31 04:52:41 1994
+++ ./config/i386/x-freebsd Wed Nov 2 18:45:36 1994
@@ -1,3 +1,6 @@
# Don't run fixproto
STMP_FIXPROTO =
-CLIB=-lgnumalloc
+CLIB = -lgnumalloc
+
+# Find FreeBSD's includes before resorting to GCC's
+LIBGCC2_INCLUDES -I/usr/include
Only in .: config.h
Only in .: config.status
Only in ./cp: Makefile
diff -u -r /freebsd/gcc-2.6.1/cp/g++.c ./cp/g++.c
--- /freebsd/gcc-2.6.1/cp/g++.c Sat Oct 29 04:17:44 1994
+++ ./cp/g++.c Wed Nov 2 17:36:42 1994
@@ -84,7 +84,7 @@
#endif
extern int sys_nerr;
-#if defined(bsd4_4) || defined(__NetBSD__)
+#if defined(bsd4_4) || defined(__NetBSD__) || defined(__FreeBSD__)
extern const char *const sys_errlist[];
#else
extern char *sys_errlist[];
Only in ./cp: g++.c.orig
Only in ./cp: include
Only in ./cp: stage1
Only in ./cp: stage2
Only in ./cp: stage3
Only in ./cp: stage4
Only in .: cpp
Only in .: float.h-nat
diff -u -r /freebsd/gcc-2.6.1/gcc.c ./gcc.c
--- /freebsd/gcc-2.6.1/gcc.c Thu Oct 27 15:49:58 1994
+++ ./gcc.c Wed Nov 2 17:36:43 1994
@@ -166,7 +166,7 @@
#endif
extern int sys_nerr;
-#if defined(bsd4_4) || defined(__NetBSD__)
+#if defined(bsd4_4) || defined(__NetBSD__) || defined (__FreeBSD__)
extern const char *const sys_errlist[];
#else
extern char *sys_errlist[];
Only in .: gcc.c.orig
Only in .: gfloat.h
diff -u -r /freebsd/gcc-2.6.1/ginclude/stdarg.h ./ginclude/stdarg.h
--- /freebsd/gcc-2.6.1/ginclude/stdarg.h Fri Jul 8 19:04:27 1994
+++ ./ginclude/stdarg.h Wed Nov 2 17:36:49 1994
@@ -136,13 +136,13 @@
But on BSD NET2 we must not test or define or undef it.
(Note that the comments in NET 2's ansi.h
are incorrect for _VA_LIST_--see stdio.h!) */
-#if !defined (_VA_LIST_) || defined (__BSD_NET2__) || defined (____386BSD____) || defined (__bsdi__) || defined (__FreeBSD__)
+#if !defined (_VA_LIST_) || defined (__BSD_NET2__) || defined (____386BSD____) || defined (__bsdi__)
/* The macro _VA_LIST is used in SCO Unix 3.2. */
#ifndef _VA_LIST
/* The macro _VA_LIST_T_H is used in the Bull dpx2 */
#ifndef _VA_LIST_T_H
#define _VA_LIST_T_H
-#if !(defined (__BSD_NET2__) || defined (____386BSD____) || defined (__bsdi__) || defined (__FreeBSD__))
+#if !(defined (__BSD_NET2__) || defined (____386BSD____) || defined (__bsdi__)
#define _VA_LIST_
#endif
#define _VA_LIST
Only in ./ginclude: stdarg.h.orig
diff -u -r /freebsd/gcc-2.6.1/ginclude/stddef.h ./ginclude/stddef.h
--- /freebsd/gcc-2.6.1/ginclude/stddef.h Fri Oct 7 16:22:35 1994
+++ ./ginclude/stddef.h Wed Nov 2 17:36:54 1994
@@ -22,7 +22,7 @@
/* On 4.3bsd-net2, make sure ansi.h is included, so we have
one less case to deal with in the following. */
-#if defined (__BSD_NET2__) || defined (____386BSD____) || defined (__FreeBSD__)
+#if defined (__BSD_NET2__) || defined (____386BSD____)
#include <machine/ansi.h>
#endif
Only in ./ginclude: stddef.h.orig
diff -u -r /freebsd/gcc-2.6.1/ginclude/varargs.h ./ginclude/varargs.h
--- /freebsd/gcc-2.6.1/ginclude/varargs.h Fri Jul 8 19:04:32 1994
+++ ./ginclude/varargs.h Wed Nov 2 17:36:59 1994
@@ -151,13 +151,13 @@
/* Michael Eriksson <mer@sics.se> at Thu Sep 30 11:00:57 1993:
Sequent defines _VA_LIST_ in <machine/machtypes.h> to be the type to
use for va_list (``typedef _VA_LIST_ va_list'') */
-#if !defined (_VA_LIST_) || defined (__BSD_NET2__) || defined (____386BSD____) || defined (__bsdi__) || defined (__sequent__) || defined (__FreeBSD__)
+#if !defined (_VA_LIST_) || defined (__BSD_NET2__) || defined (____386BSD____) || defined (__bsdi__) || defined (__sequent__)
/* The macro _VA_LIST is used in SCO Unix 3.2. */
#ifndef _VA_LIST
/* The macro _VA_LIST_T_H is used in the Bull dpx2 */
#ifndef _VA_LIST_T_H
#define _VA_LIST_T_H
-#if !(defined (__BSD_NET2__) || defined (____386BSD____) || defined (__bsdi__) || defined (__sequent__) || defined (__FreeBSD__))
+#if !(defined (__BSD_NET2__) || defined (____386BSD____) || defined (__bsdi__) || defined (__sequent__)
#define _VA_LIST_
#endif
#define _VA_LIST
Only in ./ginclude: varargs.h.orig
Only in .: hconfig.h
Only in .: include
Only in .: libgcc.a
Only in .: libgcc1.a
Only in .: libgcc2.a
Only in .: libgcc2.ready
Only in .: md
Only in .: multilib.h
Only in .: objc-headers
Only in .: obstack.o
diff -u -r /freebsd/gcc-2.6.1/protoize.c ./protoize.c
--- /freebsd/gcc-2.6.1/protoize.c Tue Oct 4 20:17:40 1994
+++ ./protoize.c Wed Nov 2 17:37:00 1994
@@ -79,7 +79,7 @@
#undef getopt
extern int errno;
-#if defined(bsd4_4) || defined(__NetBSD__)
+#if defined(bsd4_4) || defined(__NetBSD__) || defined (__FreeBSD__)
extern const char *const sys_errlist[];
#else
extern char *sys_errlist[];
Only in .: protoize.c.orig
Only in .: stage1
Only in .: stmp-fixinc
Only in .: stmp-headers
Only in .: stmp-int-hdrs
Only in .: tconfig.h
Only in .: tm.h
Only in .: version.o
Only in .: xlimits.h

261
gnu/gnu2bmake/gcc-2.6.1.tcl Executable file
View File

@ -0,0 +1,261 @@
#!/usr/local/bin/tclsh
#
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# <phk@login.dkuug.dk> wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. Poul-Henning Kamp
# ----------------------------------------------------------------------------
#
# $FreeBSD$
#
# Repackage a pristine gcc-2.6.1 distribution ($sdir) into a tree of BSD
# "bmake"-style program directories ($ddir): cc_int (shared library of
# common objects), cpp, cc1, cc, cc1plus, c++ and libgcc.
# All the heavy lifting is done by helper procs defined in gnu2bmake.tcl
# (sh, makefile_macro, zap_suffix, add_suffix, common_set, reduce_by,
# copy_c, copy_l, mk_lib, mk_prog, cp) -- presumably: run a shell command,
# extract a make macro from gcc's Makefile, strip/append suffixes, compute
# set intersection/difference, copy sources, and emit a bmake lib/prog
# Makefile.  TODO confirm against gnu2bmake.tcl.
source gnu2bmake.tcl
#######################################################################
# Parameters to tweak
########
set sdir /freebsd/A/gcc-2.6.1
set ddir /freebsd/A/cc261
#######################################################################
# Do the stunt
########
# Run gcc's own configure first, so the generated config headers
# (config.h, tm.h, ...) we copy below describe i386-freebsd.
sh "cd $sdir ; sh configure i386--freebsd"
# .h files on their way to ~/include
set l_include {config tm pcp tree input c-lex c-tree flags machmode real
rtl c-parse c-gperf function defaults convert obstack insn-attr
bytecode bc-emit insn-flags expr insn-codes regs hard-reg-set
insn-config loop recog bc-typecd bc-opcode bc-optab typeclass
output basic-block reload integrate conditions bytetypes bi-run
bc-arity multilib stack}
# other files on their way to ~/include
set l_include_x {tree.def machmode.def rtl.def modemap.def bc-typecd.def}
# .h files going into ~/include/i386
set l_include_i386 {perform gstabs gas bsd i386 unix }
# .c source for cpp
set l_cpp {cccp cexp version}
# .c source for cc1: the C_OBJS/OBJS/BC_OBJS make macros, with the .o
# suffix stripped off to get back to .c basenames.
set l_cc1 [zap_suffix [makefile_macro C_OBJS $sdir]]
append l_cc1 " " [zap_suffix [makefile_macro OBJS $sdir]]
append l_cc1 " " [zap_suffix [makefile_macro BC_OBJS $sdir]]
# .c source for cc
set l_cc {gcc version}
append l_cc " " [zap_suffix [makefile_macro OBSTACK $sdir]]
# .c source for c++
set l_cplus [zap_suffix [makefile_macro OBSTACK $sdir]]
# .c source for c++ from "cp" subdir
set l_cplus_cp {g++}
# .c source for cc1plus
set l_cc1plus {c-common}
append l_cc1plus " " [zap_suffix [makefile_macro OBJS $sdir]]
append l_cc1plus " " [zap_suffix [makefile_macro BC_OBJS $sdir]]
# .c source for cc1plus from "cp" subdir
set l_cc1plus_cp {}
append l_cc1plus_cp " " [zap_suffix [makefile_macro CXX_OBJS $sdir/cp]]
# .h file for cc1plus from "cp" subdir
set l_cc1plus_h {lex parse cp-tree decl class hash}
# other file for cc1plus from "cp" subdir
set l_cc1plus_x {tree.def input.c}
# All files used more than once go into the lib.
# Sources shared by 2+ programs become libcc_int; each program list is
# then reduced to only the sources unique to that program.
set l_common [common_set $l_cpp $l_cc1 $l_cc $l_cc1plus $l_cplus]
set l_cpp [reduce_by $l_cpp $l_common]
set l_cc1 [reduce_by $l_cc1 $l_common]
set l_cc [reduce_by $l_cc $l_common]
set l_cplus [reduce_by $l_cplus $l_common]
set l_cc1plus [reduce_by $l_cc1plus $l_common]
# functions in libgcc1
set l_libgcc1 [makefile_macro LIB1FUNCS $sdir]
# functions in libgcc2
set l_libgcc2 [makefile_macro LIB2FUNCS $sdir]
# .c files in libgcc
set l_libgcc {libgcc1.c libgcc2.c}
# .h files in libgcc
set l_libgcc_h {tconfig longlong glimits gbl-ctors}
# gcc's version string and configured target triplet, read out of its
# Makefile; baked into the generated CFLAGS below.
set version [makefile_macro version $sdir]
set target [makefile_macro target $sdir]
# do ~
# Start from a clean destination tree.
sh "rm -rf $ddir"
sh "mkdir $ddir"
# Makefile.inc: CFLAGS shared by every program subdirectory.
# The \\\" sequences produce -DFOO=\"...\" string defines in the output.
set f [open $ddir/Makefile.inc w]
puts $f "#\n# \$FreeBSD\$\n#\n"
puts $f "CFLAGS+=\t-I\${.CURDIR} -I\${.CURDIR}/../include"
puts $f "CFLAGS+=\t-Dbsd4_4"
puts $f "CFLAGS+=\t-DGCC_INCLUDE_DIR=\\\"FOO\\\""
puts $f "CFLAGS+=\t-DGPLUSPLUS_INCLUDE_DIR=\\\"FOO\\\""
puts $f "CFLAGS+=\t-DTOOL_INCLUDE_DIR=\\\"FOO\\\""
puts $f "CFLAGS+=\t-DDEFAULT_TARGET_VERSION=\\\"$version\\\""
puts $f "CFLAGS+=\t-DDEFAULT_TARGET_MACHINE=\\\"$target\\\""
puts $f "CFLAGS+=\t-DMD_EXEC_PREFIX=\\\"/usr/libexec/\\\""
puts $f "CFLAGS+=\t-DSTANDARD_STARTFILE_PREFIX=\\\"/usr/lib\\\""
close $f
# Top-level Makefile just recurses into the program subdirectories.
set f [open $ddir/Makefile w]
puts $f "#\n# \$FreeBSD\$\n#\n"
puts $f "PGMDIR=\tcc_int cpp cc1 cc cc1plus c++ libgcc"
puts $f "SUBDIR=\t\$(PGMDIR)"
puts $f "\n.include <bsd.subdir.mk>"
close $f
# do ~/legal
sh "mkdir $ddir/legal"
sh "cp $sdir/gen-*.c $sdir/md $ddir/legal"
set f [open $ddir/README w]
puts $f {
$FreeBSD$
This directory contains gcc in a form that uses "bmake" makefiles.
This is not the place you want to start, if you want to hack gcc.
we have included everything here which is part of the source-code
of gcc, but still, don't use this as a hacking-base.
If you suspect a problem with gcc, or just want to hack it in general,
get a complete gcc-X.Y.Z.tar.gz from somewhere, and use that.
Please look in the directory src/gnu/gnu2bmake to find the tools
to generate these files.
Thankyou.
}
# NOTE(review): the README handle is never closed before $f is reused
# below -- an fd leak, though harmless here since Tcl flushes and closes
# open channels when the script exits.
# do ~/libgcc
sh "mkdir $ddir/libgcc"
set f [open $ddir/libgcc/Makefile w]
puts $f "#\n# \$FreeBSD\$\n#\n"
puts $f "LIB=\tgcc"
puts $f "INSTALL_PIC_ARCHIVE=\tyes"
puts $f "SHLIB_MAJOR=\t26"
puts $f "SHLIB_MINOR=\t1"
puts $f ""
puts $f "LIB1OBJS=\t[add_suffix $l_libgcc1 .o]"
puts $f "LIB2OBJS=\t[add_suffix $l_libgcc2 .o]"
# Fixed rule boilerplate, written verbatim (brace-quoting suppresses all
# Tcl substitution): each _xxx.o is libgcc[12].c compiled with -DL_xxx,
# plus .so/.po variants for PIC and profiled libraries.
puts $f {
OBJS= ${LIB1OBJS} ${LIB2OBJS}
LIB1SOBJS=${LIB1OBJS:.o=.so}
LIB2SOBJS=${LIB2OBJS:.o=.so}
P1OBJS=${LIB1OBJS:.o=.po}
P2OBJS=${LIB2OBJS:.o=.po}
${LIB1OBJS}: libgcc1.c
${CC} -c ${CFLAGS} -DL${.PREFIX} -o ${.TARGET} ${.CURDIR}/libgcc1.c
@${LD} -x -r ${.TARGET}
@mv a.out ${.TARGET}
${LIB2OBJS}: libgcc2.c
${CC} -c ${CFLAGS} -DL${.PREFIX} -o ${.TARGET} ${.CURDIR}/libgcc2.c
@${LD} -x -r ${.TARGET}
@mv a.out ${.TARGET}
.if !defined(NOPIC)
${LIB1SOBJS}: libgcc1.c
${CC} -c -fpic ${CFLAGS} -DL${.PREFIX} -o ${.TARGET} ${.CURDIR}/libgcc1.c
${LIB2SOBJS}: libgcc2.c
${CC} -c -fpic ${CFLAGS} -DL${.PREFIX} -o ${.TARGET} ${.CURDIR}/libgcc2.c
.endif
.if !defined(NOPROFILE)
${P1OBJS}: libgcc1.c
${CC} -c -p ${CFLAGS} -DL${.PREFIX} -o ${.TARGET} ${.CURDIR}/libgcc1.c
${P2OBJS}: libgcc2.c
${CC} -c -p ${CFLAGS} -DL${.PREFIX} -o ${.TARGET} ${.CURDIR}/libgcc2.c
.endif
.include <bsd.lib.mk>
}
close $f
copy_c $sdir $ddir/libgcc $l_libgcc
# do ~/include
sh "mkdir $ddir/include"
copy_l $sdir $ddir/include [add_suffix $l_include .h]
copy_l $sdir $ddir/include $l_include_x
copy_l $sdir $ddir/include [add_suffix $l_libgcc_h .h]
# do ~/include/i386
sh "mkdir $ddir/include/i386"
copy_l $sdir/config/i386 $ddir/include/i386 [add_suffix $l_include_i386 .h]
# do ~/cc_int
# Static library of everything shared by two or more of the programs;
# the extra lines are appended to the generated Makefile.
mk_lib $ddir cc_int [add_suffix $l_common .c] {
"NOPROFILE=\t1"
"\ninstall:\n\t@true"
}
copy_c $sdir $ddir/cc_int $l_common
# do ~/cpp
mk_prog $ddir cpp [add_suffix $l_cpp .c] {
"BINDIR=\t/usr/libexec"
"LDDESTDIR+=\t-L\${.CURDIR}/../cc_int/obj"
"LDDESTDIR+=\t-L\${.CURDIR}/../cc_int"
"LDADD+=\t-lcc_int"
}
copy_c $sdir $ddir/cpp $l_cpp
cp $sdir/cpp.1 $ddir/cpp/cpp.1
# do ~/c++
mk_prog $ddir c++ [add_suffix "$l_cplus $l_cplus_cp" .c] {
"BINDIR=\t/usr/bin"
"NOMAN=\t1"
"LDDESTDIR+=\t-L\${.CURDIR}/../cc_int/obj"
"LDDESTDIR+=\t-L\${.CURDIR}/../cc_int"
"LDADD+=\t-lcc_int"
}
copy_c $sdir $ddir/c++ $l_cplus
copy_c $sdir/cp $ddir/c++ $l_cplus_cp
# do ~/cc
# The driver; installs gcc as a symlink to cc after install.
mk_prog $ddir cc [add_suffix $l_cc .c] {
"BINDIR=\t/usr/bin"
"MLINKS+=cc.1 gcc.1"
"MLINKS+=cc.1 c++.1"
"MLINKS+=cc.1 g++.1"
"LDDESTDIR+=\t-L\${.CURDIR}/../cc_int/obj"
"LDDESTDIR+=\t-L\${.CURDIR}/../cc_int"
"LDADD+=\t-lcc_int"
"\nafterinstall:\n\tcd \$(DESTDIR)\$(BINDIR) ; rm gcc ; ln -s cc gcc"
}
copy_c $sdir $ddir/cc $l_cc
cp $sdir/gcc.1 $ddir/cc/cc.1
# do ~/cc1
mk_prog $ddir cc1 [add_suffix $l_cc1 .c] {
"BINDIR=\t/usr/libexec"
"NOMAN=\t1"
"LDDESTDIR+=\t-L\${.CURDIR}/../cc_int/obj"
"LDDESTDIR+=\t-L\${.CURDIR}/../cc_int"
"LDADD+=\t-lcc_int"
}
copy_c $sdir $ddir/cc1 $l_cc1
# do ~/cc1plus
mk_prog $ddir cc1plus [add_suffix "$l_cc1plus_cp $l_cc1plus" .c] {
"BINDIR=\t/usr/libexec"
"NOMAN=\t1"
"LDDESTDIR+=\t-L\${.CURDIR}/../cc_int/obj"
"LDDESTDIR+=\t-L\${.CURDIR}/../cc_int"
"LDADD+=\t-lcc_int"
}
copy_l $sdir/cp $ddir/cc1plus $l_cc1plus_x
copy_c $sdir $ddir/cc1plus $l_cc1plus
copy_c $sdir/cp $ddir/cc1plus $l_cc1plus_cp
copy_l $sdir/cp $ddir/cc1plus [add_suffix $l_cc1plus_h .h]
exit 0

View File

@ -5,10 +5,10 @@
LIB= gcc
INSTALL_PIC_ARCHIVE= yes
SHLIB_MAJOR= 26
SHLIB_MINOR= 0
SHLIB_MINOR= 1
LIB1OBJS= _mulsi3.o _udivsi3.o _divsi3.o _umodsi3.o _modsi3.o _lshrsi3.o _lshlsi3.o _ashrsi3.o _ashlsi3.o _divdf3.o _muldf3.o _negdf2.o _adddf3.o _subdf3.o _fixdfsi.o _fixsfsi.o _floatsidf.o _floatsisf.o _truncdfsf2.o _extendsfdf2.o _addsf3.o _negsf2.o _subsf3.o _mulsf3.o _divsf3.o _eqdf2.o _nedf2.o _gtdf2.o _gedf2.o _ltdf2.o _ledf2.o _eqsf2.o _nesf2.o _gtsf2.o _gesf2.o _ltsf2.o _lesf2.o
LIB2OBJS= _muldi3.o _divdi3.o _moddi3.o _udivdi3.o _umoddi3.o _negdi2.o _lshrdi3.o _lshldi3.o _ashldi3.o _ashrdi3.o _ffsdi2.o _udiv_w_sdiv.o _udivmoddi4.o _cmpdi2.o _ucmpdi2.o _floatdidf.o _floatdisf.o _fixunsdfsi.o _fixunssfsi.o _fixunsdfdi.o _fixdfdi.o _fixunssfdi.o _fixsfdi.o _fixxfdi.o _fixunsxfdi.o _floatdixf.o _fixunsxfsi.o _fixtfdi.o _fixunstfdi.o _floatditf.o __gcc_bcmp.o _varargs.o _eprintf.o _op_new.o _op_vnew.o _new_handler.o _op_delete.o _op_vdel.o _bb.o _shtab.o _clear_cache.o _trampoline.o __main.o _exit.o _ctors.o
LIB2OBJS= _muldi3.o _divdi3.o _moddi3.o _udivdi3.o _umoddi3.o _negdi2.o _lshrdi3.o _lshldi3.o _ashldi3.o _ashrdi3.o _ffsdi2.o _udiv_w_sdiv.o _udivmoddi4.o _cmpdi2.o _ucmpdi2.o _floatdidf.o _floatdisf.o _fixunsdfsi.o _fixunssfsi.o _fixunsdfdi.o _fixdfdi.o _fixunssfdi.o _fixsfdi.o _fixxfdi.o _fixunsxfdi.o _floatdixf.o _fixunsxfsi.o _fixtfdi.o _fixunstfdi.o _floatditf.o __gcc_bcmp.o _varargs.o _eprintf.o _op_new.o _op_vnew.o _new_handler.o _op_delete.o _op_vdel.o _bb.o _shtab.o _clear_cache.o _trampoline.o __main.o _exit.o _ctors.o _eh.o _pure.o
OBJS= ${LIB1OBJS} ${LIB2OBJS}
LIB1SOBJS=${LIB1OBJS:.o=.so}

View File

@ -1,11 +1,13 @@
#
# $Id: Makefile.inc,v 1.5 1994/09/19 21:37:42 wollman Exp $
# $Id: Makefile.inc,v 1.6 1994/10/25 07:02:18 davidg Exp $
#
CFLAGS+= -I${.CURDIR} -I${.CURDIR}/../include
CFLAGS+= -Dbsd4_4
CFLAGS+= -DGCC_INCLUDE_DIR=\"FOO\"
CFLAGS+= -DDEFAULT_TARGET_VERSION=\"2.6.0\"
CFLAGS+= -DTOOL_INCLUDE_DIR=\"FOO\"
CFLAGS+= -DGPLUSPLUS_INCLUDE_DIR=\"FOO\"
CFLAGS+= -DDEFAULT_TARGET_VERSION=\"2.6.1\"
CFLAGS+= -DDEFAULT_TARGET_MACHINE=\"i386--freebsd\"
CFLAGS+= -DMD_EXEC_PREFIX=\"/usr/libexec/\"
CFLAGS+= -DSTANDARD_STARTFILE_PREFIX=\"/usr/lib\"

View File

@ -1,5 +1,5 @@
$FreeBSD$
$Id$
This directory contains gcc in a form that uses "bmake" makefiles.
This is not the place you want to start, if you want to hack gcc.

View File

@ -40,6 +40,7 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
#include <stdio.h>
#include <sys/types.h>
#include <sys/file.h> /* May get R_OK, etc. on some systems. */
#include <errno.h>
/* Defined to the name of the compiler; if using a cross compiler, the
Makefile should compile this file with the proper name
@ -78,8 +79,12 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
#endif
#endif
extern int errno, sys_nerr;
#if defined(bsd4_4) || defined(__NetBSD__)
#ifndef errno
extern int errno;
#endif
extern int sys_nerr;
#if defined(bsd4_4) || defined(__NetBSD__) || defined(__FreeBSD__)
extern const char *const sys_errlist[];
#else
extern char *sys_errlist[];
@ -390,7 +395,7 @@ main (argc, argv)
#endif
args = (int *) malloc (argc * sizeof (int));
bzero (args, argc * sizeof (int));
bzero ((char *) args, argc * sizeof (int));
for (i = 1; i < argc; i++)
{
@ -431,7 +436,7 @@ main (argc, argv)
&& (char *)strchr ("bBVDUoeTuIYmLiA", argv[i][1]) != NULL)
|| strcmp (argv[i], "-Tdata") == 0))
quote = argv[i];
else if (((argv[i][2] == '\0'
else if (library != NULL && ((argv[i][2] == '\0'
&& (char *) strchr ("cSEM", argv[i][1]) != NULL)
|| strcmp (argv[i], "-MM") == 0))
{
@ -449,7 +454,10 @@ main (argc, argv)
int len;
if (saw_speclang)
continue;
{
saw_speclang = 0;
continue;
}
/* If the filename ends in .c or .i, put options around it.
But not if a specified -x option is currently active. */

File diff suppressed because it is too large Load Diff

View File

@ -1303,6 +1303,7 @@ duplicate_decls (newdecl, olddecl)
&& DECL_INITIAL (newdecl) != 0);
tree oldtype = TREE_TYPE (olddecl);
tree newtype = TREE_TYPE (newdecl);
char *errmsg = 0;
if (TREE_CODE (newtype) == ERROR_MARK
|| TREE_CODE (oldtype) == ERROR_MARK)
@ -1528,7 +1529,7 @@ duplicate_decls (newdecl, olddecl)
}
else
{
char *errmsg = redeclaration_error_message (newdecl, olddecl);
errmsg = redeclaration_error_message (newdecl, olddecl);
if (errmsg)
{
error_with_decl (newdecl, errmsg);
@ -1625,7 +1626,7 @@ duplicate_decls (newdecl, olddecl)
}
/* Optionally warn about more than one declaration for the same name. */
if (warn_redundant_decls && DECL_SOURCE_LINE (olddecl) != 0
if (errmsg == 0 && warn_redundant_decls && DECL_SOURCE_LINE (olddecl) != 0
/* Dont warn about a function declaration
followed by a definition. */
&& !(TREE_CODE (newdecl) == FUNCTION_DECL && DECL_INITIAL (newdecl) != 0
@ -2439,6 +2440,21 @@ shadow_label (name)
if (decl != 0)
{
register tree dup;
/* Check to make sure that the label hasn't already been declared
at this label scope */
for (dup = named_labels; dup; dup = TREE_CHAIN (dup))
if (TREE_VALUE (dup) == decl)
{
error ("duplicate label declaration `%s'",
IDENTIFIER_POINTER (name));
error_with_decl (TREE_VALUE (dup),
"this is a previous declaration");
/* Just use the previous declaration. */
return lookup_label (name);
}
shadowed_labels = tree_cons (NULL_TREE, decl, shadowed_labels);
IDENTIFIER_LABEL_VALUE (name) = decl = 0;
}
@ -3673,7 +3689,7 @@ finish_decl (decl, init, asmspec_tree)
references to it. */
/* This test used to include TREE_STATIC, but this won't be set
for function level initializers. */
if (TREE_READONLY (decl))
if (TREE_READONLY (decl) || ITERATOR_P (decl))
{
preserve_initializer ();
/* Hack? Set the permanent bit for something that is permanent,
@ -5418,7 +5434,7 @@ finish_struct (t, fieldlist)
#endif
}
}
else
else if (TREE_TYPE (x) != error_mark_node)
{
int min_align = (DECL_PACKED (x) ? BITS_PER_UNIT
: TYPE_ALIGN (TREE_TYPE (x)));
@ -5646,37 +5662,6 @@ start_enum (name)
return enumtype;
}
/* Return the minimum number of bits needed to represent VALUE in a
signed or unsigned type, UNSIGNEDP says which. */
static int
min_precision (value, unsignedp)
tree value;
int unsignedp;
{
int log;
/* If the value is negative, compute its negative minus 1. The latter
adjustment is because the absolute value of the largest negative value
is one larger than the largest positive value. This is equivalent to
a bit-wise negation, so use that operation instead. */
if (tree_int_cst_sgn (value) < 0)
value = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (value), value));
/* Return the number of bits needed, taking into account the fact
that we need one more bit for a signed than unsigned type. */
if (integer_zerop (value))
log = 0;
else if (TREE_INT_CST_HIGH (value) != 0)
log = HOST_BITS_PER_WIDE_INT + floor_log2 (TREE_INT_CST_HIGH (value));
else
log = floor_log2 (TREE_INT_CST_LOW (value));
return log + 1 + ! unsignedp;
}
/* After processing and defining all the values of an enumeration type,
install their decls in the enumeration type and finish it off.
ENUMTYPE is the type object and VALUES a list of decl-value pairs.
@ -5868,6 +5853,7 @@ start_function (declspecs, declarator, nested)
{
tree decl1, old_decl;
tree restype;
int old_immediate_size_expand = immediate_size_expand;
current_function_returns_value = 0; /* Assume, until we see it does. */
current_function_returns_null = 0;
@ -5877,6 +5863,9 @@ start_function (declspecs, declarator, nested)
named_labels = 0;
shadowed_labels = 0;
/* Don't expand any sizes in the return type of the function. */
immediate_size_expand = 0;
decl1 = grokdeclarator (declarator, declspecs, FUNCDEF, 1);
/* If the declarator is not suitable for a function definition,
@ -5922,6 +5911,11 @@ start_function (declspecs, declarator, nested)
current_function_prototype_line = DECL_SOURCE_LINE (old_decl);
}
/* If there is no explicit declaration, look for any out-of-scope implicit
declarations. */
if (old_decl == 0)
old_decl = IDENTIFIER_IMPLICIT_DECL (DECL_NAME (decl1));
/* Optionally warn of old-fashioned def with no previous prototype. */
if (warn_strict_prototypes
&& TYPE_ARG_TYPES (TREE_TYPE (decl1)) == 0
@ -5937,7 +5931,7 @@ start_function (declspecs, declarator, nested)
if the function has already been used. */
else if (warn_missing_prototypes
&& old_decl != 0 && TREE_USED (old_decl)
&& !(old_decl != 0 && TYPE_ARG_TYPES (TREE_TYPE (old_decl)) != 0))
&& TYPE_ARG_TYPES (TREE_TYPE (old_decl)) == 0)
warning_with_decl (decl1,
"`%s' was used with no prototype before its definition");
/* Optionally warn of any global def with no previous declaration. */
@ -5949,7 +5943,8 @@ start_function (declspecs, declarator, nested)
/* Optionally warn of any def with no previous declaration
if the function has already been used. */
else if (warn_missing_declarations
&& old_decl != 0 && TREE_USED (old_decl))
&& old_decl != 0 && TREE_USED (old_decl)
&& old_decl == IDENTIFIER_IMPLICIT_DECL (DECL_NAME (decl1)))
warning_with_decl (decl1,
"`%s' was used with no declaration before its definition");
@ -6006,6 +6001,8 @@ start_function (declspecs, declarator, nested)
if (TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (current_function_decl)))
TREE_ADDRESSABLE (current_function_decl) = 1;
immediate_size_expand = old_immediate_size_expand;
return 1;
}

View File

@ -395,9 +395,9 @@ static const short yyrline[] = { 0,
1576, 1585, 1590, 1595, 1600, 1604, 1608, 1619, 1626, 1633,
1640, 1651, 1655, 1658, 1663, 1686, 1720, 1745, 1774, 1789,
1800, 1804, 1808, 1811, 1816, 1818, 1821, 1823, 1827, 1832,
1835, 1841, 1846, 1851, 1853, 1862, 1863, 1869, 1871, 1876,
1878, 1882, 1885, 1891, 1894, 1896, 1898, 1900, 1907, 1912,
1917, 1919, 1928, 1931, 1936, 1939
1835, 1841, 1846, 1851, 1853, 1862, 1863, 1869, 1871, 1881,
1883, 1887, 1890, 1896, 1899, 1901, 1903, 1905, 1912, 1917,
1922, 1924, 1933, 1936, 1941, 1944
};
static const char * const yytname[] = { "$","error","$illegal.","IDENTIFIER",
@ -3252,60 +3252,65 @@ case 348:
case 349:
#line 1872 "c-parse.y"
{ yyval.ttype = get_parm_info (0);
if (pedantic)
pedwarn ("ANSI C requires a named argument before `...'");
/* Gcc used to allow this as an extension. However, it does
not work for all targets, and thus has been disabled.
Also, since func (...) and func () are indistinguishable,
it caused problems with the code in expand_builtin which
tries to verify that BUILT_IN_NEXT_ARG is being used
correctly. */
error ("ANSI C requires a named argument before `...'");
;
break;}
case 350:
#line 1877 "c-parse.y"
#line 1882 "c-parse.y"
{ yyval.ttype = get_parm_info (1); ;
break;}
case 351:
#line 1879 "c-parse.y"
#line 1884 "c-parse.y"
{ yyval.ttype = get_parm_info (0); ;
break;}
case 352:
#line 1884 "c-parse.y"
#line 1889 "c-parse.y"
{ push_parm_decl (yyvsp[0].ttype); ;
break;}
case 353:
#line 1886 "c-parse.y"
#line 1891 "c-parse.y"
{ push_parm_decl (yyvsp[0].ttype); ;
break;}
case 354:
#line 1893 "c-parse.y"
#line 1898 "c-parse.y"
{ yyval.ttype = build_tree_list (yyvsp[-1].ttype, yyvsp[0].ttype) ; ;
break;}
case 355:
#line 1895 "c-parse.y"
#line 1900 "c-parse.y"
{ yyval.ttype = build_tree_list (yyvsp[-1].ttype, yyvsp[0].ttype) ; ;
break;}
case 356:
#line 1897 "c-parse.y"
#line 1902 "c-parse.y"
{ yyval.ttype = build_tree_list (yyvsp[-1].ttype, yyvsp[0].ttype); ;
break;}
case 357:
#line 1899 "c-parse.y"
#line 1904 "c-parse.y"
{ yyval.ttype = build_tree_list (yyvsp[-1].ttype, yyvsp[0].ttype) ; ;
break;}
case 358:
#line 1901 "c-parse.y"
#line 1906 "c-parse.y"
{ yyval.ttype = build_tree_list (yyvsp[-1].ttype, yyvsp[0].ttype); ;
break;}
case 359:
#line 1908 "c-parse.y"
#line 1913 "c-parse.y"
{ pushlevel (0);
clear_parm_order ();
declare_parm_level (1); ;
break;}
case 360:
#line 1912 "c-parse.y"
#line 1917 "c-parse.y"
{ yyval.ttype = yyvsp[0].ttype;
parmlist_tags_warning ();
poplevel (0, 0, 0); ;
break;}
case 362:
#line 1920 "c-parse.y"
#line 1925 "c-parse.y"
{ tree t;
for (t = yyvsp[-1].ttype; t; t = TREE_CHAIN (t))
if (TREE_VALUE (t) == NULL_TREE)
@ -3313,19 +3318,19 @@ case 362:
yyval.ttype = tree_cons (NULL_TREE, NULL_TREE, yyvsp[-1].ttype); ;
break;}
case 363:
#line 1930 "c-parse.y"
#line 1935 "c-parse.y"
{ yyval.ttype = build_tree_list (NULL_TREE, yyvsp[0].ttype); ;
break;}
case 364:
#line 1932 "c-parse.y"
#line 1937 "c-parse.y"
{ yyval.ttype = chainon (yyvsp[-2].ttype, build_tree_list (NULL_TREE, yyvsp[0].ttype)); ;
break;}
case 365:
#line 1938 "c-parse.y"
#line 1943 "c-parse.y"
{ yyval.ttype = build_tree_list (NULL_TREE, yyvsp[0].ttype); ;
break;}
case 366:
#line 1940 "c-parse.y"
#line 1945 "c-parse.y"
{ yyval.ttype = chainon (yyvsp[-2].ttype, build_tree_list (NULL_TREE, yyvsp[0].ttype)); ;
break;}
}
@ -3526,5 +3531,5 @@ case 366:
yystate = yyn;
goto yynewstate;
}
#line 1943 "c-parse.y"
#line 1948 "c-parse.y"

View File

@ -655,7 +655,8 @@ type_lists_compatible_p (args1, args2)
/* Allow wait (union {union wait *u; int *i} *)
and wait (union wait *) to be compatible. */
if (TREE_CODE (TREE_VALUE (args1)) == UNION_TYPE
&& TYPE_NAME (TREE_VALUE (args1)) == 0
&& (TYPE_NAME (TREE_VALUE (args1)) == 0
|| TYPE_TRANSPARENT_UNION (TREE_VALUE (args1)))
&& TREE_CODE (TYPE_SIZE (TREE_VALUE (args1))) == INTEGER_CST
&& tree_int_cst_equal (TYPE_SIZE (TREE_VALUE (args1)),
TYPE_SIZE (TREE_VALUE (args2))))
@ -669,7 +670,8 @@ type_lists_compatible_p (args1, args2)
return 0;
}
else if (TREE_CODE (TREE_VALUE (args2)) == UNION_TYPE
&& TYPE_NAME (TREE_VALUE (args2)) == 0
&& (TYPE_NAME (TREE_VALUE (args2)) == 0
|| TYPE_TRANSPARENT_UNION (TREE_VALUE (args2)))
&& TREE_CODE (TYPE_SIZE (TREE_VALUE (args2))) == INTEGER_CST
&& tree_int_cst_equal (TYPE_SIZE (TREE_VALUE (args2)),
TYPE_SIZE (TREE_VALUE (args1))))
@ -990,8 +992,11 @@ default_conversion (exp)
/* Constants can be used directly unless they're not loadable. */
if (TREE_CODE (exp) == CONST_DECL)
exp = DECL_INITIAL (exp);
/* Replace a nonvolatile const static variable with its value. */
else if (optimize && TREE_CODE (exp) == VAR_DECL)
/* Replace a nonvolatile const static variable with its value unless
it is an array, in which case we must be sure that taking the
address of the array produces consistent results. */
else if (optimize && TREE_CODE (exp) == VAR_DECL && code != ARRAY_TYPE)
{
exp = decl_constant_value (exp);
type = TREE_TYPE (exp);
@ -1630,36 +1635,27 @@ convert_arguments (typelist, values, name, fundecl)
}
else
{
#if 0 /* This turns out not to win--there's no way to write a prototype
for a function whose arg type is a union with no tag. */
/* Nameless union automatically casts the types it contains. */
if (TREE_CODE (type) == UNION_TYPE && TYPE_NAME (type) == 0)
{
tree field;
for (field = TYPE_FIELDS (type); field;
field = TREE_CHAIN (field))
if (comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (field)),
TYPE_MAIN_VARIANT (TREE_TYPE (val))))
break;
if (field)
val = build1 (CONVERT_EXPR, type, val);
}
#endif
/* Optionally warn about conversions that
differ from the default conversions. */
if (warn_conversion)
{
int formal_prec = TYPE_PRECISION (type);
if (TREE_CODE (type) != REAL_TYPE
if (INTEGRAL_TYPE_P (type)
&& TREE_CODE (TREE_TYPE (val)) == REAL_TYPE)
warn_for_assignment ("%s as integer rather than floating due to prototype", (char *) 0, name, parmnum + 1);
else if (TREE_CODE (type) == COMPLEX_TYPE
&& TREE_CODE (TREE_TYPE (val)) == REAL_TYPE)
warn_for_assignment ("%s as complex rather than floating due to prototype", (char *) 0, name, parmnum + 1);
else if (TREE_CODE (type) == REAL_TYPE
&& TREE_CODE (TREE_TYPE (val)) != REAL_TYPE)
&& INTEGRAL_TYPE_P (TREE_TYPE (val)))
warn_for_assignment ("%s as floating rather than integer due to prototype", (char *) 0, name, parmnum + 1);
else if (TREE_CODE (type) == REAL_TYPE
&& TREE_CODE (TREE_TYPE (val)) == COMPLEX_TYPE)
warn_for_assignment ("%s as floating rather than complex due to prototype", (char *) 0, name, parmnum + 1);
/* ??? At some point, messages should be written about
conversions between complex types, but that's too messy
to do now. */
else if (TREE_CODE (type) == REAL_TYPE
&& TREE_CODE (TREE_TYPE (val)) == REAL_TYPE)
{
@ -1669,10 +1665,8 @@ convert_arguments (typelist, values, name, fundecl)
warn_for_assignment ("%s as `float' rather than `double' due to prototype", (char *) 0, name, parmnum + 1);
}
/* Detect integer changing in width or signedness. */
else if ((TREE_CODE (type) == INTEGER_TYPE
|| TREE_CODE (type) == ENUMERAL_TYPE)
&& (TREE_CODE (TREE_TYPE (val)) == INTEGER_TYPE
|| TREE_CODE (TREE_TYPE (val)) == ENUMERAL_TYPE))
else if (INTEGRAL_TYPE_P (type)
&& INTEGRAL_TYPE_P (TREE_TYPE (val)))
{
tree would_have_been = default_conversion (val);
tree type1 = TREE_TYPE (would_have_been);
@ -2798,7 +2792,7 @@ build_unary_op (code, xarg, noconvert)
((code == PREINCREMENT_EXPR
|| code == POSTINCREMENT_EXPR)
? "increment" : "decrement"));
inc = c_sizeof_nowarn (TREE_TYPE (result_type));
inc = c_size_in_bytes (TREE_TYPE (result_type));
}
else
inc = integer_one_node;
@ -3211,6 +3205,18 @@ mark_addressable (exp)
IDENTIFIER_POINTER (DECL_NAME (x)));
return 0;
}
/* If we are making this addressable due to its having
volatile components, give a different error message. Also
handle the case of an unnamed parameter by not trying
to give the name. */
else if (C_TYPE_FIELDS_VOLATILE (TREE_TYPE (x)))
{
error ("cannot put object with volatile field into register");
return 0;
}
pedwarn ("address of register variable `%s' requested",
IDENTIFIER_POINTER (DECL_NAME (x)));
}
@ -3868,14 +3874,15 @@ convert_for_assignment (type, rhs, errtype, fundecl, funname, parmnum)
/* Arithmetic types all interconvert, and enum is treated like int. */
if ((codel == INTEGER_TYPE || codel == REAL_TYPE || codel == ENUMERAL_TYPE
|| codel == COMPLEX_TYPE)
&&
(coder == INTEGER_TYPE || coder == REAL_TYPE || coder == ENUMERAL_TYPE
|| coder == COMPLEX_TYPE))
&& (coder == INTEGER_TYPE || coder == REAL_TYPE || coder == ENUMERAL_TYPE
|| coder == COMPLEX_TYPE))
return convert_and_check (type, rhs);
/* Conversion to a union from its member types. */
else if (codel == UNION_TYPE)
{
tree memb_types;
for (memb_types = TYPE_FIELDS (type); memb_types;
memb_types = TREE_CHAIN (memb_types))
{
@ -3886,6 +3893,7 @@ convert_for_assignment (type, rhs, errtype, fundecl, funname, parmnum)
pedwarn ("ANSI C prohibits argument conversion to union type");
return build1 (NOP_EXPR, type, rhs);
}
else if (coder == POINTER_TYPE
&& TREE_CODE (TREE_TYPE (memb_types)) == POINTER_TYPE)
{
@ -3895,44 +3903,59 @@ convert_for_assignment (type, rhs, errtype, fundecl, funname, parmnum)
/* Any non-function converts to a [const][volatile] void *
and vice versa; otherwise, targets must be the same.
Meanwhile, the lhs target must have all the qualifiers of the rhs. */
Meanwhile, the lhs target must have all the qualifiers of
the rhs. */
if (TYPE_MAIN_VARIANT (ttl) == void_type_node
|| TYPE_MAIN_VARIANT (ttr) == void_type_node
|| comp_target_types (memb_type, rhstype))
{
/* Const and volatile mean something different for function types,
so the usual warnings are not appropriate. */
/* Const and volatile mean something different for function
types, so the usual warnings are not appropriate. */
if (TREE_CODE (ttr) != FUNCTION_TYPE
|| TREE_CODE (ttl) != FUNCTION_TYPE)
{
if (! TYPE_READONLY (ttl) && TYPE_READONLY (ttr))
warn_for_assignment ("%s discards `const' from pointer target type",
get_spelling (errtype), funname, parmnum);
get_spelling (errtype), funname,
parmnum);
if (! TYPE_VOLATILE (ttl) && TYPE_VOLATILE (ttr))
warn_for_assignment ("%s discards `volatile' from pointer target type",
get_spelling (errtype), funname, parmnum);
get_spelling (errtype), funname,
parmnum);
}
else
{
/* Because const and volatile on functions are restrictions
that say the function will not do certain things,
it is okay to use a const or volatile function
where an ordinary one is wanted, but not vice-versa. */
/* Because const and volatile on functions are
restrictions that say the function will not do
certain things, it is okay to use a const or volatile
function where an ordinary one is wanted, but not
vice-versa. */
if (TYPE_READONLY (ttl) && ! TYPE_READONLY (ttr))
warn_for_assignment ("%s makes `const *' function pointer from non-const",
get_spelling (errtype), funname, parmnum);
get_spelling (errtype), funname,
parmnum);
if (TYPE_VOLATILE (ttl) && ! TYPE_VOLATILE (ttr))
warn_for_assignment ("%s makes `volatile *' function pointer from non-volatile",
get_spelling (errtype), funname, parmnum);
get_spelling (errtype), funname,
parmnum);
}
if (pedantic
&& !(fundecl != 0 && DECL_IN_SYSTEM_HEADER (fundecl)))
pedwarn ("ANSI C prohibits argument conversion to union type");
return build1 (NOP_EXPR, type, rhs);
}
}
/* Can convert integer zero to any pointer type. */
else if (TREE_CODE (TREE_TYPE (memb_types)) == POINTER_TYPE
&& (integer_zerop (rhs)
|| (TREE_CODE (rhs) == NOP_EXPR
&& integer_zerop (TREE_OPERAND (rhs, 0)))))
return build1 (NOP_EXPR, type, null_pointer_node);
}
}
/* Conversions among pointers */
else if (codel == POINTER_TYPE && coder == POINTER_TYPE)
{
@ -5155,8 +5178,8 @@ push_init_level (implicit)
/* Structure elements may require alignment. Do this now
if necessary for the subaggregate. */
if (constructor_incremental && TREE_CODE (constructor_type) == RECORD_TYPE
&& constructor_fields)
if (constructor_incremental && constructor_type != 0
&& TREE_CODE (constructor_type) == RECORD_TYPE && constructor_fields)
{
/* Advance to offset of this element. */
if (! tree_int_cst_equal (constructor_bit_index,
@ -6020,6 +6043,7 @@ process_init_element (value)
/* Otherwise, if we have come to a subaggregate,
and we don't have an element of its type, push into it. */
else if (value != 0 && !constructor_no_implicit
&& value != error_mark_node
&& TYPE_MAIN_VARIANT (TREE_TYPE (value)) != fieldtype
&& (fieldcode == RECORD_TYPE || fieldcode == ARRAY_TYPE
|| fieldcode == UNION_TYPE))
@ -6083,6 +6107,7 @@ process_init_element (value)
/* Otherwise, if we have come to a subaggregate,
and we don't have an element of its type, push into it. */
else if (value != 0 && !constructor_no_implicit
&& value != error_mark_node
&& TYPE_MAIN_VARIANT (TREE_TYPE (value)) != fieldtype
&& (fieldcode == RECORD_TYPE || fieldcode == ARRAY_TYPE
|| fieldcode == UNION_TYPE))
@ -6126,6 +6151,7 @@ process_init_element (value)
/* Otherwise, if we have come to a subaggregate,
and we don't have an element of its type, push into it. */
else if (value != 0 && !constructor_no_implicit
&& value != error_mark_node
&& TYPE_MAIN_VARIANT (TREE_TYPE (value)) != elttype
&& (eltcode == RECORD_TYPE || eltcode == ARRAY_TYPE
|| eltcode == UNION_TYPE))
@ -6142,6 +6168,10 @@ process_init_element (value)
break;
}
/* In the case of [LO .. HI] = VALUE, only evaluate VALUE once. */
if (constructor_range_end)
value = save_expr (value);
/* Now output the actual element.
Ordinarily, output once.
If there is a range, repeat it till we advance past the range. */

View File

@ -700,11 +700,17 @@ compute_conversion_costs (function, tta_in, cp, arglen)
int strike_index = 0, win;
struct harshness_code lose;
extern int cp_silent;
#ifdef GATHER_STATISTICS
n_compute_conversion_costs++;
#endif
#ifndef DEBUG_MATCHING
/* We don't emit any warnings or errors while trying out each candidate. */
cp_silent = 1;
#endif
cp->function = function;
cp->arg = tta ? TREE_VALUE (tta) : NULL_TREE;
cp->u.bad_arg = 0; /* optimistic! */
@ -712,7 +718,7 @@ compute_conversion_costs (function, tta_in, cp, arglen)
cp->h.code = 0;
cp->h.distance = 0;
cp->h.int_penalty = 0;
bzero (cp->harshness,
bzero ((char *) cp->harshness,
(cp->h_len + 1) * sizeof (struct harshness_code));
while (ttf && tta)
@ -812,6 +818,7 @@ compute_conversion_costs (function, tta_in, cp, arglen)
{
cp->h.code = EVIL_CODE;
cp->u.bad_arg = -1;
cp_silent = 0;
return;
}
else
@ -833,6 +840,7 @@ compute_conversion_costs (function, tta_in, cp, arglen)
{
cp->h.code = EVIL_CODE;
cp->u.bad_arg = -2;
cp_silent = 0;
return;
}
/* Store index of first default. */
@ -855,6 +863,7 @@ compute_conversion_costs (function, tta_in, cp, arglen)
if (dont_convert_types)
{
cp->h.code = EVIL_CODE;
cp_silent = 0;
return;
}
@ -1002,6 +1011,7 @@ compute_conversion_costs (function, tta_in, cp, arglen)
cp->h.code |= ELLIPSIS_CODE;
if (user_strikes)
cp->h.code |= USER_CODE;
cp_silent = 0;
#ifdef DEBUG_MATCHING
cp_error ("final eval %s", print_harshness (&cp->h));
#endif
@ -1428,11 +1438,11 @@ build_scoped_method_call (exp, scopes, name, parms)
if (type != basetype)
cp_error ("type of `%E' does not match destructor type `%T' (type was `%T')",
exp, basetype, type);
name = IDENTIFIER_TYPE_VALUE (TREE_OPERAND (name, 0));
if (basetype != name)
cp_error ("qualified type `%T' does not match destructor type `%T'",
name = TREE_OPERAND (name, 0);
if (basetype != get_type_value (name))
cp_error ("qualified type `%T' does not match destructor name `~%T'",
basetype, name);
return void_zero_node;
return convert (void_type_node, exp);
}
if (! is_aggr_typedef (basename, 1))
@ -1460,15 +1470,16 @@ build_scoped_method_call (exp, scopes, name, parms)
{
/* Explicit call to destructor. */
name = TREE_OPERAND (name, 0);
if (name != constructor_name (TREE_TYPE (decl)))
if (! (name == constructor_name (TREE_TYPE (decl))
|| TREE_TYPE (decl) == get_type_value (name)))
{
cp_error
("qualified type `%T' does not match destructor type `%T'",
("qualified type `%T' does not match destructor name `~%T'",
TREE_TYPE (decl), name);
return error_mark_node;
}
if (! TYPE_HAS_DESTRUCTOR (TREE_TYPE (decl)))
return void_zero_node;
return convert (void_type_node, exp);
return build_delete (TREE_TYPE (decl), decl, integer_two_node,
LOOKUP_NORMAL|LOOKUP_NONVIRTUAL|LOOKUP_DESTRUCTOR,
@ -1604,23 +1615,19 @@ build_method_call (instance, name, parms, basetype_path, flags)
if (parms)
error ("destructors take no parameters");
basetype = TREE_TYPE (instance);
if (IS_AGGR_TYPE (basetype))
if (TREE_CODE (basetype) == REFERENCE_TYPE)
basetype = TREE_TYPE (basetype);
if (! ((IS_AGGR_TYPE (basetype)
&& name == constructor_name (basetype))
|| basetype == get_type_value (name)))
{
if (name == constructor_name (basetype))
goto huzzah;
cp_error ("destructor name `~%D' does not match type `%T' of expression",
name, basetype);
return convert (void_type_node, instance);
}
else
{
if (basetype == get_type_value (name))
goto huzzah;
}
cp_error ("destructor name `~%D' does not match type `%T' of expression",
name, basetype);
return void_zero_node;
huzzah:
if (! TYPE_HAS_DESTRUCTOR (basetype))
return void_zero_node;
return convert (void_type_node, instance);
instance = default_conversion (instance);
instance_ptr = build_unary_op (ADDR_EXPR, instance, 0);
return build_delete (build_pointer_type (basetype),
@ -1806,7 +1813,11 @@ build_method_call (instance, name, parms, basetype_path, flags)
}
else
{
if (TREE_CODE (instance) != CALL_EXPR)
if (TREE_CODE (instance) != CALL_EXPR
#ifdef PCC_STATIC_STRUCT_RETURN
&& TREE_CODE (instance) != RTL_EXPR
#endif
)
my_friendly_abort (125);
if (TYPE_NEEDS_CONSTRUCTING (basetype))
instance = build_cplus_new (basetype, instance, 0);
@ -1897,6 +1908,8 @@ build_method_call (instance, name, parms, basetype_path, flags)
{
TREE_VALUE (parm) = build_unary_op (ADDR_EXPR, TREE_VALUE (parm), 0);
}
#if 0
/* This breaks reference-to-array parameters. */
if (TREE_CODE (t) == ARRAY_TYPE)
{
/* Perform the conversion from ARRAY_TYPE to POINTER_TYPE in place.
@ -1904,6 +1917,7 @@ build_method_call (instance, name, parms, basetype_path, flags)
TREE_VALUE (parm) = default_conversion (TREE_VALUE (parm));
t = TREE_TYPE (TREE_VALUE (parm));
}
#endif
if (t == error_mark_node)
return error_mark_node;
last = build_tree_list (NULL_TREE, t);
@ -1932,7 +1946,9 @@ build_method_call (instance, name, parms, basetype_path, flags)
{
constp = 0;
volatilep = 0;
parms = tree_cons (NULL_TREE, build1 (NOP_EXPR, TYPE_POINTER_TO (basetype), integer_zero_node), parms);
parms = tree_cons (NULL_TREE,
build1 (NOP_EXPR, TYPE_POINTER_TO (basetype),
integer_zero_node), parms);
}
else
{
@ -1945,6 +1961,10 @@ build_method_call (instance, name, parms, basetype_path, flags)
TREE_CALLS_NEW (instance_ptr) = 1;
instance = build_indirect_ref (instance_ptr, NULL_PTR);
#if 0
/* This breaks initialization of a reference from a new
expression of a different type. And it doesn't appear to
serve its original purpose any more, either. jason 10/12/94 */
/* If it's a default argument initialized from a ctor, what we get
from instance_ptr will match the arglist for the FUNCTION_DECL
of the constructor. */
@ -1953,6 +1973,7 @@ build_method_call (instance, name, parms, basetype_path, flags)
&& TREE_CALLS_NEW (TREE_VALUE (TREE_OPERAND (TREE_VALUE (parms), 1))))
parms = build_tree_list (NULL_TREE, instance_ptr);
else
#endif
parms = tree_cons (NULL_TREE, instance_ptr, parms);
}
}
@ -2012,6 +2033,7 @@ build_method_call (instance, name, parms, basetype_path, flags)
return error_mark_node;
#if 0
/* Now, go look for this method name. We do not find destructors here.
Putting `void_list_node' on the end of the parmtypes
@ -2021,6 +2043,7 @@ build_method_call (instance, name, parms, basetype_path, flags)
1 + (name == constructor_name (save_basetype)
|| name == constructor_name_full (save_basetype)));
TREE_CHAIN (last) = NULL_TREE;
#endif
for (pass = 0; pass < 2; pass++)
{
@ -2040,7 +2063,7 @@ build_method_call (instance, name, parms, basetype_path, flags)
candidates
= (struct candidate *) alloca ((ever_seen+1)
* sizeof (struct candidate));
bzero (candidates, (ever_seen + 1) * sizeof (struct candidate));
bzero ((char *) candidates, (ever_seen + 1) * sizeof (struct candidate));
cp = candidates;
len = list_length (parms);
ever_seen = 0;
@ -2062,7 +2085,7 @@ build_method_call (instance, name, parms, basetype_path, flags)
{
tree new_type;
parm = build_indirect_ref (parm, "friendifying parms (compiler error)");
new_type = c_build_type_variant (TREE_TYPE (parm), constp,
new_type = cp_build_type_variant (TREE_TYPE (parm), constp,
volatilep);
new_type = build_reference_type (new_type);
parm = convert (new_type, parm);
@ -2147,9 +2170,11 @@ build_method_call (instance, name, parms, basetype_path, flags)
&& ! DECL_STATIC_FUNCTION_P (function))
continue;
#if 0
if (pass == 0
&& DECL_ASSEMBLER_NAME (function) == method_name)
goto found;
#endif
if (pass > 0)
{
@ -2244,6 +2269,7 @@ build_method_call (instance, name, parms, basetype_path, flags)
if (cp - candidates > 1)
{
int n_candidates = cp - candidates;
extern int warn_synth;
TREE_VALUE (parms) = instance_ptr;
cp = ideal_candidate (save_basetype, candidates,
n_candidates, parms, len);
@ -2251,14 +2277,25 @@ build_method_call (instance, name, parms, basetype_path, flags)
{
if (flags & LOOKUP_COMPLAIN)
{
cp_error ("call of overloaded %s `%D' is ambiguous",
name_kind, name);
TREE_CHAIN (last) = void_list_node;
cp_error ("call of overloaded %s `%D(%A)' is ambiguous",
name_kind, name, TREE_CHAIN (parmtypes));
print_n_candidates (candidates, n_candidates);
}
return error_mark_node;
}
if (cp->h.code & EVIL_CODE)
return error_mark_node;
if (warn_synth
&& DECL_NAME (cp->function) == ansi_opname[MODIFY_EXPR]
&& DECL_ARTIFICIAL (cp->function)
&& n_candidates == 2)
{
cp_warning ("using synthesized `%#D' for copy assignment",
cp->function);
cp_warning_at (" where cfront would use `%#D'",
candidates->function);
}
}
else if (cp[-1].h.code & EVIL_CODE)
{
@ -2664,7 +2701,11 @@ build_overload_call_real (fnname, parms, flags, final_cp, buildxxx)
final_cp->h.code = EVIL_CODE;
return error_mark_node;
}
if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == OFFSET_TYPE)
if (TREE_CODE (t) == OFFSET_TYPE)
#if 0
/* This breaks reference-to-array parameters. */
|| TREE_CODE (t) == ARRAY_TYPE
#endif
{
/* Perform the conversion from ARRAY_TYPE to POINTER_TYPE in place.
Also convert OFFSET_TYPE entities to their normal selves.
@ -2738,7 +2779,7 @@ build_overload_call_real (fnname, parms, flags, final_cp, buildxxx)
{
candidates
= (struct candidate *)alloca ((length+1) * sizeof (struct candidate));
bzero (candidates, (length + 1) * sizeof (struct candidate));
bzero ((char *) candidates, (length + 1) * sizeof (struct candidate));
}
cp = candidates;

View File

@ -942,7 +942,7 @@ add_method (type, fields, method)
* sizeof (char *)
+ len * sizeof (tree));
tmp_vec = (tree) obstack_base (ob);
bcopy (method_vec, tmp_vec,
bcopy ((char *) method_vec, (char *) tmp_vec,
(sizeof (struct tree_common)
+ tree_code_length[(int) TREE_VEC] * sizeof (char *)
+ (len-1) * sizeof (tree)));
@ -992,20 +992,18 @@ add_method (type, fields, method)
not duplicates, they are just anonymous fields. This happens
when we have unnamed bitfields, for example. */
static tree
delete_duplicate_fields_1 (field, field_ptr, fields)
tree field, *field_ptr, fields;
delete_duplicate_fields_1 (field, fields)
tree field, fields;
{
tree x;
tree prev = field_ptr ? *field_ptr : 0;
tree prev = 0;
if (DECL_NAME (field) == 0)
{
if (TREE_CODE (TREE_TYPE (field)) != UNION_TYPE)
return fields;
for (x = TYPE_FIELDS (TREE_TYPE (field)); x; x = TREE_CHAIN (x))
fields = delete_duplicate_fields_1 (x, field_ptr, fields);
if (prev)
TREE_CHAIN (prev) = fields;
fields = delete_duplicate_fields_1 (x, fields);
return fields;
}
else
@ -1017,7 +1015,7 @@ delete_duplicate_fields_1 (field, field_ptr, fields)
if (TREE_CODE (TREE_TYPE (x)) != UNION_TYPE)
continue;
TYPE_FIELDS (TREE_TYPE (x))
= delete_duplicate_fields_1 (field, (tree *)0, TYPE_FIELDS (TREE_TYPE (x)));
= delete_duplicate_fields_1 (field, TYPE_FIELDS (TREE_TYPE (x)));
if (TYPE_FIELDS (TREE_TYPE (x)) == 0)
{
if (prev == 0)
@ -1039,7 +1037,7 @@ delete_duplicate_fields_1 (field, field_ptr, fields)
x);
else if (TREE_CODE (field) == TYPE_DECL
&& TREE_CODE (x) == TYPE_DECL)
cp_error_at ("duplicate class scope type `%D'", x);
cp_error_at ("duplicate nested type `%D'", x);
else if (TREE_CODE (field) == TYPE_DECL
|| TREE_CODE (x) == TYPE_DECL)
cp_error_at ("duplicate field `%D' (as type and non-type)",
@ -1063,7 +1061,7 @@ delete_duplicate_fields (fields)
{
tree x;
for (x = fields; x && TREE_CHAIN (x); x = TREE_CHAIN (x))
TREE_CHAIN (x) = delete_duplicate_fields_1 (x, &x, TREE_CHAIN (x));
TREE_CHAIN (x) = delete_duplicate_fields_1 (x, TREE_CHAIN (x));
}
/* Change the access of FDECL to ACCESS in T.
@ -1121,10 +1119,14 @@ get_vfield_offset (binfo)
BINFO_OFFSET (binfo));
}
/* Get the offset to the start of the original binfo that we derived this
binfo from. */
tree get_derived_offset (binfo)
tree binfo;
/* Get the offset to the start of the original binfo that we derived
this binfo from. If we find TYPE first, return the offset only
that far. The shortened search is useful because the this pointer
on method calling is expected to point to a DECL_CONTEXT (fndecl)
object, and not a baseclass of it. */
static tree
get_derived_offset (binfo, type)
tree binfo, type;
{
tree offset1 = get_vfield_offset (TYPE_BINFO (BINFO_TYPE (binfo)));
tree offset2;
@ -1133,6 +1135,8 @@ tree get_derived_offset (binfo)
&& (i=CLASSTYPE_VFIELD_PARENT (BINFO_TYPE (binfo))) != -1)
{
tree binfos = BINFO_BASETYPES (binfo);
if (BINFO_TYPE (binfo) == type)
break;
binfo = TREE_VEC_ELT (binfos, i);
}
offset2 = get_vfield_offset (TYPE_BINFO (BINFO_TYPE (binfo)));
@ -1340,7 +1344,7 @@ finish_base_struct (t, b, t_binfo)
tree binfos = BINFO_BASETYPES (t_binfo);
int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
int first_vfn_base_index = -1;
bzero (b, sizeof (struct base_info));
bzero ((char *) b, sizeof (struct base_info));
for (i = 0; i < n_baseclasses; i++)
{
@ -1682,8 +1686,8 @@ finish_struct_bits (t, max_has_virtual)
tree *tmp;
int i;
bzero (first_conversions, sizeof (first_conversions));
bzero (last_conversions, sizeof (last_conversions));
bzero ((char *) first_conversions, sizeof (first_conversions));
bzero ((char *) last_conversions, sizeof (last_conversions));
for (tmp = &TREE_VEC_ELT (method_vec, 1);
tmp != TREE_VEC_END (method_vec); tmp += 1)
{
@ -1830,8 +1834,7 @@ finish_struct_methods (t, fn_fields, nonprivate_method)
{
tree parmtype = TREE_VALUE (FUNCTION_ARG_CHAIN (fn_fields));
if (TREE_CODE (parmtype) == REFERENCE_TYPE
&& TYPE_MAIN_VARIANT (TREE_TYPE (parmtype)) == t)
if (copy_assignment_arg_p (parmtype, DECL_VIRTUAL_P (fn_fields)))
{
if (TREE_PROTECTED (fn_fields))
TYPE_HAS_NONPUBLIC_ASSIGN_REF (t) = 1;
@ -1938,8 +1941,8 @@ finish_struct_methods (t, fn_fields, nonprivate_method)
&& CLASSTYPE_FRIEND_CLASSES (t) == NULL_TREE
&& DECL_FRIENDLIST (TYPE_NAME (t)) == NULL_TREE
&& warn_ctor_dtor_privacy)
warning ("class `%s' only defines a private destructor and has no friends",
TYPE_NAME_STRING (t));
cp_warning ("`%#T' only defines a private destructor and has no friends",
t);
break;
}
}
@ -2060,7 +2063,7 @@ duplicate_tag_error (t)
int interface_only = CLASSTYPE_INTERFACE_ONLY (t);
int interface_unknown = CLASSTYPE_INTERFACE_UNKNOWN (t);
bzero (TYPE_LANG_SPECIFIC (t), sizeof (struct lang_type));
bzero ((char *) TYPE_LANG_SPECIFIC (t), sizeof (struct lang_type));
BINFO_BASETYPES(binfo) = NULL_TREE;
CLASSTYPE_AS_LIST (t) = as_list;
@ -2151,6 +2154,86 @@ overrides (fndecl, base_fndecl)
return 0;
}
static tree
get_class_offset_1 (parent, binfo, context, t, fndecl)
tree parent, binfo, context, t, fndecl;
{
tree binfos = BINFO_BASETYPES (binfo);
int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
tree rval = NULL_TREE;
if (binfo == parent)
return error_mark_node;
for (i = 0; i < n_baselinks; i++)
{
tree base_binfo = TREE_VEC_ELT (binfos, i);
tree nrval;
if (TREE_VIA_VIRTUAL (base_binfo))
base_binfo = binfo_member (BINFO_TYPE (base_binfo),
CLASSTYPE_VBASECLASSES (t));
nrval = get_class_offset_1 (parent, base_binfo, context, t, fndecl);
/* See if we have a new value */
if (nrval && (nrval != error_mark_node || rval==0))
{
/* Only compare if we have two offsets */
if (rval && rval != error_mark_node
&& ! tree_int_cst_equal (nrval, rval))
{
/* Only give error if the two offsets are different */
error ("every virtual function must have a unique final overrider");
cp_error (" found two (or more) `%T' class subobjects in `%T'", context, t);
cp_error (" with virtual `%D' from virtual base class", fndecl);
return rval;
}
rval = nrval;
}
if (rval && BINFO_TYPE (binfo) == context)
{
my_friendly_assert (rval == error_mark_node
|| tree_int_cst_equal (rval, BINFO_OFFSET (binfo)), 999);
rval = BINFO_OFFSET (binfo);
}
}
return rval;
}
/* Get the offset to the CONTEXT subobject that is related to the
given BINFO. */
static tree
get_class_offset (context, t, binfo, fndecl)
tree context, t, binfo, fndecl;
{
tree first_binfo = binfo;
tree offset;
int i;
if (context == t)
return integer_zero_node;
if (BINFO_TYPE (binfo) == context)
return BINFO_OFFSET (binfo);
/* Check less derived binfos first. */
while (BINFO_BASETYPES (binfo)
&& (i=CLASSTYPE_VFIELD_PARENT (BINFO_TYPE (binfo))) != -1)
{
tree binfos = BINFO_BASETYPES (binfo);
binfo = TREE_VEC_ELT (binfos, i);
if (BINFO_TYPE (binfo) == context)
return BINFO_OFFSET (binfo);
}
/* Ok, not found in the less derived binfos, now check the more
derived binfos. */
offset = get_class_offset_1 (first_binfo, TYPE_BINFO (t), context, t, fndecl);
if (offset==0 || TREE_CODE (offset) != INTEGER_CST)
my_friendly_abort (999); /* we have to find it. */
return offset;
}
static void
modify_one_vtable (binfo, t, fndecl, pfn)
tree binfo, t, fndecl, pfn;
@ -2174,16 +2257,7 @@ modify_one_vtable (binfo, t, fndecl, pfn)
tree vfield = CLASSTYPE_VFIELD (t);
tree this_offset;
offset = integer_zero_node;
if (context != t && TYPE_USES_COMPLEX_INHERITANCE (t))
{
offset = virtual_offset (context, CLASSTYPE_VBASECLASSES (t), offset);
if (offset == NULL_TREE)
{
tree binfo = get_binfo (context, t, 0);
offset = BINFO_OFFSET (binfo);
}
}
offset = get_class_offset (context, t, binfo, fndecl);
/* Find the right offset for the this pointer based on the
base class we just found. We have to take into
@ -2193,7 +2267,7 @@ modify_one_vtable (binfo, t, fndecl, pfn)
Also, we want just the delta bewteen the most base class
that we derived this vfield from and us. */
base_offset = size_binop (PLUS_EXPR,
get_derived_offset (binfo),
get_derived_offset (binfo, DECL_CONTEXT (current_fndecl)),
BINFO_OFFSET (binfo));
this_offset = size_binop (MINUS_EXPR, offset, base_offset);
@ -2288,16 +2362,7 @@ fixup_vtable_deltas (binfo, t)
tree vfield = CLASSTYPE_VFIELD (t);
tree this_offset;
offset = integer_zero_node;
if (context != t && TYPE_USES_COMPLEX_INHERITANCE (t))
{
offset = virtual_offset (context, CLASSTYPE_VBASECLASSES (t), offset);
if (offset == NULL_TREE)
{
tree binfo = get_binfo (context, t, 0);
offset = BINFO_OFFSET (binfo);
}
}
offset = get_class_offset (context, t, binfo, fndecl);
/* Find the right offset for the this pointer based on the
base class we just found. We have to take into
@ -2307,7 +2372,7 @@ fixup_vtable_deltas (binfo, t)
Also, we want just the delta bewteen the most base class
that we derived this vfield from and us. */
base_offset = size_binop (PLUS_EXPR,
get_derived_offset (binfo),
get_derived_offset (binfo, DECL_CONTEXT (fndecl)),
BINFO_OFFSET (binfo));
this_offset = size_binop (MINUS_EXPR, offset, base_offset);
@ -2684,6 +2749,7 @@ finish_struct (t, list_of_fieldlists, warn_anon)
will fill in the right line number. (mrs) */
if (DECL_SOURCE_LINE (name))
DECL_SOURCE_LINE (name) = lineno;
CLASSTYPE_SOURCE_LINE (t) = lineno;
}
name = DECL_NAME (name);
}
@ -2713,18 +2779,15 @@ finish_struct (t, list_of_fieldlists, warn_anon)
TYPE_SIZE (t) = NULL_TREE;
CLASSTYPE_GOT_SEMICOLON (t) = 0;
/* A signature type will contain the fields of the signature table.
Therefore, it's not only an interface. */
if (IS_SIGNATURE (t))
#if 0
/* This is in general too late to do this. I moved the main case up to
left_curly, what else needs to move? */
if (! IS_SIGNATURE (t))
{
CLASSTYPE_INTERFACE_ONLY (t) = 0;
SET_CLASSTYPE_INTERFACE_KNOWN (t);
}
else
{
CLASSTYPE_INTERFACE_ONLY (t) = interface_only;
SET_CLASSTYPE_INTERFACE_UNKNOWN_X (t, interface_unknown);
my_friendly_assert (CLASSTYPE_INTERFACE_ONLY (t) == interface_only, 999);
my_friendly_assert (CLASSTYPE_INTERFACE_KNOWN (t) == ! interface_unknown, 999);
}
#endif
if (flag_dossier)
build_t_desc (t, 0);
@ -2787,14 +2850,15 @@ finish_struct (t, list_of_fieldlists, warn_anon)
needs_virtual_dtor = 0;
}
#if 0
/* Both of these should be done before now. */
if (write_virtuals == 3 && CLASSTYPE_INTERFACE_KNOWN (t)
&& ! IS_SIGNATURE (t))
{
CLASSTYPE_INTERFACE_ONLY (t) = interface_only;
CLASSTYPE_VTABLE_NEEDS_WRITING (t) = ! interface_only;
my_friendly_assert (CLASSTYPE_INTERFACE_ONLY (t) == interface_only, 999);
my_friendly_assert (CLASSTYPE_VTABLE_NEEDS_WRITING (t) == ! interface_only, 999);
}
else if (IS_SIGNATURE (t))
CLASSTYPE_VTABLE_NEEDS_WRITING (t) = 0;
#endif
/* The three of these are approximations which may later be
modified. Needed at this point to make add_virtual_function
@ -2951,6 +3015,9 @@ finish_struct (t, list_of_fieldlists, warn_anon)
TREE_TYPE (x) = build_pointer_type (TREE_TYPE (x));
}
if (DECL_NAME (x) == constructor_name (t))
cant_have_default_ctor = cant_synth_copy_ctor = 1;
if (TREE_TYPE (x) == error_mark_node)
continue;
@ -3217,12 +3284,14 @@ finish_struct (t, list_of_fieldlists, warn_anon)
CLASSTYPE_REF_FIELDS_NEED_INIT (t) = ref_sans_init;
CLASSTYPE_ABSTRACT_VIRTUALS (t) = abstract_virtuals;
/* Synthesize any needed methods. Note that methods will be synthesized
for anonymous unions; grok_x_components undoes that. */
if (TYPE_NEEDS_DESTRUCTOR (t) && !TYPE_HAS_DESTRUCTOR (t)
&& !IS_SIGNATURE (t))
{
/* Here we must cons up a destructor on the fly. */
tree dtor = cons_up_default_function (t, name, fields,
needs_virtual_dtor != 0);
tree dtor = cons_up_default_function (t, name, needs_virtual_dtor != 0);
/* If we couldn't make it work, then pretend we didn't need it. */
if (dtor == void_type_node)
@ -3251,9 +3320,6 @@ finish_struct (t, list_of_fieldlists, warn_anon)
TYPE_NEEDS_DESTRUCTOR (t) |= TYPE_HAS_DESTRUCTOR (t);
/* Synthesize any needed methods. Note that methods will be synthesized
for anonymous unions; grok_x_components undoes that. */
if (! fn_fields)
nonprivate_method = 1;
@ -3271,7 +3337,7 @@ finish_struct (t, list_of_fieldlists, warn_anon)
if (! TYPE_HAS_CONSTRUCTOR (t) && ! cant_have_default_ctor
&& ! IS_SIGNATURE (t))
{
tree default_fn = cons_up_default_function (t, name, fields, 2);
tree default_fn = cons_up_default_function (t, name, 2);
TREE_CHAIN (default_fn) = fn_fields;
fn_fields = default_fn;
}
@ -3282,9 +3348,8 @@ finish_struct (t, list_of_fieldlists, warn_anon)
{
/* ARM 12.18: You get either X(X&) or X(const X&), but
not both. --Chip */
tree default_fn =
cons_up_default_function (t, name, fields,
cant_have_const_ctor ? 4 : 3);
tree default_fn = cons_up_default_function (t, name,
3 + cant_have_const_ctor);
TREE_CHAIN (default_fn) = fn_fields;
fn_fields = default_fn;
}
@ -3298,9 +3363,8 @@ finish_struct (t, list_of_fieldlists, warn_anon)
if (! TYPE_HAS_ASSIGN_REF (t) && ! cant_synth_asn_ref
&& ! IS_SIGNATURE (t))
{
tree default_fn =
cons_up_default_function (t, name, fields,
no_const_asn_ref ? 6 : 5);
tree default_fn = cons_up_default_function (t, name,
5 + no_const_asn_ref);
TREE_CHAIN (default_fn) = fn_fields;
fn_fields = default_fn;
}
@ -3351,7 +3415,7 @@ finish_struct (t, list_of_fieldlists, warn_anon)
tree flist = NULL_TREE;
tree name;
enum access_type access = (enum access_type)TREE_PURPOSE(access_decls);
int i = 0;
int i = TREE_VEC_ELT (method_vec, 0) ? 0 : 1;
tree tmp;
if (TREE_CODE (fdecl) == TREE_LIST)
@ -3459,6 +3523,27 @@ finish_struct (t, list_of_fieldlists, warn_anon)
/* Delete all duplicate fields from the fields */
delete_duplicate_fields (fields);
/* Catch function/field name conflict. We don't need to do this for a
signature, since it can only contain the fields constructed in
append_signature_fields. */
if (! IS_SIGNATURE (t))
{
int n_methods = method_vec ? TREE_VEC_LENGTH (method_vec) : 0;
for (x = fields; x; x = TREE_CHAIN (x))
{
tree name = DECL_NAME (x);
int i = /*TREE_VEC_ELT (method_vec, 0) ? 0 : */ 1;
for (; i < n_methods; ++i)
if (DECL_NAME (TREE_VEC_ELT (method_vec, i)) == name)
{
cp_error_at ("data member `%#D' conflicts with", x);
cp_error_at ("function member `%#D'",
TREE_VEC_ELT (method_vec, i));
break;
}
}
}
/* Now we have the final fieldlist for the data fields. Record it,
then lay out the structure or union (including the fields). */
@ -3498,6 +3583,9 @@ finish_struct (t, list_of_fieldlists, warn_anon)
tree uelt = TYPE_FIELDS (TREE_TYPE (field));
for (; uelt; uelt = TREE_CHAIN (uelt))
{
if (TREE_CODE (uelt) != FIELD_DECL)
continue;
DECL_FIELD_CONTEXT (uelt) = DECL_FIELD_CONTEXT (field);
DECL_FIELD_BITPOS (uelt) = DECL_FIELD_BITPOS (field);
}
@ -3552,6 +3640,9 @@ finish_struct (t, list_of_fieldlists, warn_anon)
tree uelt = TYPE_FIELDS (TREE_TYPE (field));
for (; uelt; uelt = TREE_CHAIN (uelt))
{
if (TREE_CODE (uelt) != FIELD_DECL)
continue;
DECL_FIELD_CONTEXT (uelt) = DECL_FIELD_CONTEXT (field);
DECL_FIELD_BITPOS (uelt) = DECL_FIELD_BITPOS (field);
}
@ -4634,11 +4725,42 @@ instantiate_type (lhstype, rhs, complain)
{
elem = get_first_fn (rhs);
while (elem)
if (TREE_TYPE (elem) != lhstype)
if (! comptypes (lhstype, TREE_TYPE (elem), 1))
elem = DECL_CHAIN (elem);
else
return elem;
/* No exact match found, look for a compatible function. */
/* No exact match found, look for a compatible template. */
{
tree save_elem = 0;
for (elem = get_first_fn (rhs); elem; elem = DECL_CHAIN (elem))
if (TREE_CODE (elem) == TEMPLATE_DECL)
{
int n = TREE_VEC_LENGTH (DECL_TEMPLATE_PARMS (elem));
tree *t = (tree *) alloca (sizeof (tree) * n);
int i, d;
i = type_unification (DECL_TEMPLATE_PARMS (elem), t,
TYPE_ARG_TYPES (TREE_TYPE (elem)),
TYPE_ARG_TYPES (lhstype), &d, 0);
if (i == 0)
{
if (save_elem)
{
cp_error ("ambiguous template instantiation converting to `%#T'", lhstype);
return error_mark_node;
}
save_elem = instantiate_template (elem, t);
/* Check the return type. */
if (! comptypes (TREE_TYPE (lhstype),
TREE_TYPE (TREE_TYPE (save_elem)), 1))
save_elem = 0;
}
}
if (save_elem)
return save_elem;
}
/* No match found, look for a compatible function. */
elem = get_first_fn (rhs);
while (elem && ! comp_target_types (lhstype, TREE_TYPE (elem), 1))
elem = DECL_CHAIN (elem);
@ -4660,18 +4782,6 @@ instantiate_type (lhstype, rhs, complain)
}
return error_mark_node;
}
if (TREE_CODE (save_elem) == TEMPLATE_DECL)
{
int ntparms = TREE_VEC_LENGTH
(DECL_TEMPLATE_PARMS (save_elem));
tree *targs = (tree *) alloca (sizeof (tree) * ntparms);
int i, dummy;
i = type_unification
(DECL_TEMPLATE_PARMS (save_elem), targs,
TYPE_ARG_TYPES (TREE_TYPE (save_elem)),
TYPE_ARG_TYPES (lhstype), &dummy, 0);
save_elem = instantiate_template (save_elem, targs);
}
return save_elem;
}
if (complain)
@ -4864,12 +4974,14 @@ instantiate_type (lhstype, rhs, complain)
}
TREE_TYPE (rhs) = lhstype;
lhstype = TREE_TYPE (lhstype);
TREE_OPERAND (rhs, 0)
= instantiate_type (lhstype, TREE_OPERAND (rhs, 0), complain);
if (TREE_OPERAND (rhs, 0) == error_mark_node)
return error_mark_node;
mark_addressable (TREE_OPERAND (rhs, 0));
{
tree fn = instantiate_type (lhstype, TREE_OPERAND (rhs, 0), complain);
if (fn == error_mark_node)
return error_mark_node;
mark_addressable (fn);
TREE_OPERAND (rhs, 0) = fn;
TREE_CONSTANT (rhs) = staticp (fn);
}
return rhs;
case ENTRY_VALUE_EXPR:

View File

@ -502,8 +502,12 @@ struct lang_type
union tree_node *signature;
union tree_node *signature_pointer_to;
union tree_node *signature_reference_to;
int linenum;
};
#define CLASSTYPE_SOURCE_LINE(NODE) (TYPE_LANG_SPECIFIC(NODE)->linenum)
/* Indicates whether or not (and how) a template was expanded for this class.
0=no information yet/non-template class
1=implicit template instantiation
@ -1355,7 +1359,7 @@ extern void check_function_format PROTO((tree, tree, tree));
/* Print an error message for invalid operands to arith operation CODE.
NOP_EXPR is used as a special case (see truthvalue_conversion). */
extern void binary_op_error PROTO((enum tree_code));
extern tree c_build_type_variant PROTO((tree, int, int));
extern tree cp_build_type_variant PROTO((tree, int, int));
extern void c_expand_expr_stmt PROTO((tree));
/* Validate the expression after `case' and apply default promotions. */
extern tree check_case_value PROTO((tree));
@ -2011,7 +2015,7 @@ extern void expand_end_all_catch PROTO((void));
extern void start_catch_block PROTO((tree, tree));
extern void end_catch_block PROTO((void));
extern void expand_throw PROTO((tree));
extern void build_exception_table PROTO((void));
extern int build_exception_table PROTO((void));
extern tree build_throw PROTO((tree));
extern void init_exception_processing PROTO((void));
@ -2091,7 +2095,7 @@ extern void reinit_parse_for_method PROTO((int, tree));
#if 0
extern void reinit_parse_for_block PROTO((int, struct obstack *, int));
#endif
extern tree cons_up_default_function PROTO((tree, tree, tree, int));
extern tree cons_up_default_function PROTO((tree, tree, int));
extern void check_for_missing_semicolon PROTO((tree));
extern void note_got_semicolon PROTO((tree));
extern void note_list_got_semicolon PROTO((tree));
@ -2141,6 +2145,7 @@ extern void clear_anon_parm_name PROTO((void));
extern void do_inline_function_hair PROTO((tree, tree));
/* skip report_type_mismatch */
extern char *build_overload_name PROTO((tree, int, int));
extern tree build_static_name PROTO((tree, tree));
extern tree cplus_exception_name PROTO((tree));
extern tree build_decl_overload PROTO((tree, tree, int));
extern tree build_typename_overload PROTO((tree));
@ -2254,7 +2259,6 @@ extern tree copy_binfo PROTO((tree));
extern tree binfo_value PROTO((tree, tree));
extern tree reverse_path PROTO((tree));
extern tree virtual_member PROTO((tree, tree));
extern tree virtual_offset PROTO((tree, tree, tree));
extern void debug_binfo PROTO((tree));
extern int decl_list_length PROTO((tree));
extern int count_functions PROTO((tree));
@ -2360,7 +2364,7 @@ extern void GNU_xref_end PROTO((int));
extern void GNU_xref_file PROTO((char *));
extern void GNU_xref_start_scope PROTO((HOST_WIDE_INT));
extern void GNU_xref_end_scope PROTO((HOST_WIDE_INT, HOST_WIDE_INT, int, int, int));
extern void GNU_xref_def PROTO((tree, char *));
extern void GNU_xref_ref PROTO((tree, char *));
extern void GNU_xref_decl PROTO((tree, tree));
extern void GNU_xref_call PROTO((tree, char *));
extern void GNU_xref_function PROTO((tree, tree));

View File

@ -304,7 +304,7 @@ build_up_reference (type, arg, flags, checkconst)
/* Pass along const and volatile down into the type. */
if (TYPE_READONLY (type) || TYPE_VOLATILE (type))
target_type = c_build_type_variant (target_type, TYPE_READONLY (type),
target_type = cp_build_type_variant (target_type, TYPE_READONLY (type),
TYPE_VOLATILE (type));
targ = arg;
if (TREE_CODE (targ) == SAVE_EXPR)
@ -425,10 +425,11 @@ build_up_reference (type, arg, flags, checkconst)
break;
case PARM_DECL:
#if 0
if (targ == current_class_decl)
{
error ("address of `this' not available");
#if 0
/* #if 0 */
/* This code makes the following core dump the compiler on a sun4,
if the code below is used.
@ -465,16 +466,18 @@ build_up_reference (type, arg, flags, checkconst)
TREE_ADDRESSABLE (targ) = 1; /* so compiler doesn't die later */
put_var_into_stack (targ);
break;
#else
/* #else */
return error_mark_node;
#endif
/* #endif */
}
#endif
/* Fall through. */
case VAR_DECL:
case CONST_DECL:
if (DECL_REGISTER (targ) && !TREE_ADDRESSABLE (targ))
warning ("address needed to build reference for `%s', which is declared `register'",
IDENTIFIER_POINTER (DECL_NAME (targ)));
if (DECL_REGISTER (targ) && !TREE_ADDRESSABLE (targ)
&& !DECL_ARTIFICIAL (targ))
cp_warning ("address needed to build reference for `%D', which is declared `register'",
targ);
else if (staticp (targ))
literal_flag = 1;
@ -491,6 +494,8 @@ build_up_reference (type, arg, flags, checkconst)
return rval;
}
case PREINCREMENT_EXPR:
case PREDECREMENT_EXPR:
case MODIFY_EXPR:
case INIT_EXPR:
{
@ -631,7 +636,7 @@ convert_to_reference (reftype, expr, convtype, flags, decl)
{
int r = TREE_READONLY (expr);
int v = TREE_THIS_VOLATILE (expr);
ttr = c_build_type_variant (TREE_TYPE (expr), r, v);
ttr = cp_build_type_variant (TREE_TYPE (expr), r, v);
}
if (! lvalue_p (expr) &&
@ -1206,6 +1211,9 @@ cp_convert (type, expr, convtype, flags)
else if (TREE_CODE (TREE_TYPE (e)) == REFERENCE_TYPE)
e = convert_from_reference (e);
if (TREE_CODE (e) == OFFSET_REF)
e = resolve_offset_ref (e);
if (TREE_READONLY_DECL_P (e))
e = decl_constant_value (e);
@ -1223,18 +1231,13 @@ cp_convert (type, expr, convtype, flags)
if (flag_pedantic_errors)
return error_mark_node;
}
if (form == OFFSET_TYPE)
cp_error_at ("pointer-to-member expression object not composed with type `%D' object",
TYPE_NAME (TYPE_OFFSET_BASETYPE (intype)));
else if (IS_AGGR_TYPE (intype))
if (IS_AGGR_TYPE (intype))
{
tree rval;
rval = build_type_conversion (CONVERT_EXPR, type, e, 1);
if (rval) return rval;
if (code == BOOLEAN_TYPE)
cp_error ("`%#T' used where a `bool' was expected", intype);
else
cp_error ("`%#T' used where an `int' was expected", intype);
if (rval)
return rval;
cp_error ("`%#T' used where a `%T' was expected", intype, type);
return error_mark_node;
}
if (code == BOOLEAN_TYPE)
@ -2021,11 +2024,16 @@ type_promotes_to (type)
wider. */
else if (TREE_CODE (type) == ENUMERAL_TYPE
|| type == wchar_type_node)
type = type_for_size
(MAX (TYPE_PRECISION (type), TYPE_PRECISION (integer_type_node)),
(flag_traditional
|| (TYPE_PRECISION (type) >= TYPE_PRECISION (integer_type_node)))
&& TREE_UNSIGNED (type));
{
int precision = MAX (TYPE_PRECISION (type),
TYPE_PRECISION (integer_type_node));
tree totype = type_for_size (precision, 0);
if (TREE_UNSIGNED (type)
&& ! int_fits_type_p (TYPE_MAX_VALUE (type), totype))
type = type_for_size (precision, 1);
else
type = totype;
}
else if (C_PROMOTING_INTEGER_TYPE_P (type))
{
/* Traditionally, unsignedness is preserved in default promotions.
@ -2040,5 +2048,5 @@ type_promotes_to (type)
else if (type == float_type_node)
type = double_type_node;
return c_build_type_variant (type, constp, volatilep);
return cp_build_type_variant (type, constp, volatilep);
}

View File

@ -162,6 +162,8 @@ tree wchar_type_node;
tree signed_wchar_type_node;
tree unsigned_wchar_type_node;
tree wchar_decl_node;
tree float_type_node;
tree double_type_node;
tree long_double_type_node;
@ -401,6 +403,11 @@ extern int flag_short_double;
extern int flag_no_builtin;
/* Nonzero means don't recognize the non-ANSI builtin functions.
-ansi sets this. */
extern int flag_no_nonansi_builtin;
/* Nonzero means disable GNU extensions. */
extern int flag_ansi;
@ -1743,6 +1750,7 @@ pushtag (name, type, globalize)
#else
d = build_decl (TYPE_DECL, name, type);
#endif
SET_DECL_ARTIFICIAL (d);
#ifdef DWARF_DEBUGGING_INFO
if (write_symbols == DWARF_DEBUG)
{
@ -1778,6 +1786,7 @@ pushtag (name, type, globalize)
/* Make nested declarations go into class-level scope. */
newdecl = 1;
d = build_decl (TYPE_DECL, name, type);
SET_DECL_ARTIFICIAL (d);
#ifdef DWARF_DEBUGGING_INFO
if (write_symbols == DWARF_DEBUG)
{
@ -1981,11 +1990,11 @@ decls_match (newdecl, olddecl)
for (i = 0; i < len; i++)
{
tree newarg = TREE_VEC_ELT (newargs, i);
tree oldarg = TREE_VEC_ELT (oldargs, i);
tree newarg = TREE_VALUE (TREE_VEC_ELT (newargs, i));
tree oldarg = TREE_VALUE (TREE_VEC_ELT (oldargs, i));
if (TREE_CODE (newarg) != TREE_CODE (oldarg))
return 0;
else if (TREE_CODE (newarg) == IDENTIFIER_NODE)
else if (TREE_CODE (newarg) == TYPE_DECL)
/* continue */;
else if (! comptypes (TREE_TYPE (newarg), TREE_TYPE (oldarg), 1))
return 0;
@ -2104,6 +2113,7 @@ duplicate_decls (newdecl, olddecl)
after implicit decl. */
;
else if (TREE_CODE (olddecl) == FUNCTION_DECL
&& DECL_ARTIFICIAL (olddecl)
&& (DECL_BUILT_IN (olddecl) || DECL_BUILT_IN_NONANSI (olddecl)))
{
/* If you declare a built-in or predefined function name as static,
@ -2188,9 +2198,8 @@ duplicate_decls (newdecl, olddecl)
newdecl);
cp_error_at ("previous declaration `%#D' here", olddecl);
}
if (compparms (TYPE_ARG_TYPES (TREE_TYPE (newdecl)),
TYPE_ARG_TYPES (TREE_TYPE (olddecl)), 2))
else if (compparms (TYPE_ARG_TYPES (TREE_TYPE (newdecl)),
TYPE_ARG_TYPES (TREE_TYPE (olddecl)), 2))
{
cp_error ("new declaration `%#D'", newdecl);
cp_error_at ("ambiguates old declaration `%#D'", olddecl);
@ -2199,16 +2208,21 @@ duplicate_decls (newdecl, olddecl)
return 0;
}
if (olddecl == wchar_decl_node)
{
if (pedantic && ! DECL_IN_SYSTEM_HEADER (newdecl))
cp_pedwarn ("redeclaration of wchar_t as `%T'",
TREE_TYPE (newdecl));
/* Throw away the redeclaration. */
return 1;
}
/* Already complained about this, so don't do so again. */
else if (current_class_type == NULL_TREE
|| IDENTIFIER_ERROR_LOCUS (DECL_ASSEMBLER_NAME (newdecl)) != current_class_type)
{
/* Since we're doing this before finish_struct can set the
line number on NEWDECL, we just do a regular error here. */
if (DECL_SOURCE_LINE (newdecl) == 0)
cp_error ("conflicting types for `%#D'", newdecl);
else
cp_error_at ("conflicting types for `%#D'", newdecl);
cp_error ("conflicting types for `%#D'", newdecl);
cp_error_at ("previous declaration as `%#D'", olddecl);
}
}
@ -2521,12 +2535,15 @@ duplicate_decls (newdecl, olddecl)
DECL_TEMPLATE_MEMBERS (newdecl) = DECL_TEMPLATE_MEMBERS (olddecl);
DECL_TEMPLATE_INSTANTIATIONS (newdecl)
= DECL_TEMPLATE_INSTANTIATIONS (olddecl);
if (DECL_CHAIN (newdecl) == NULL_TREE)
DECL_CHAIN (newdecl) = DECL_CHAIN (olddecl);
}
/* Now preserve various other info from the definition. */
TREE_ADDRESSABLE (newdecl) = TREE_ADDRESSABLE (olddecl);
TREE_ASM_WRITTEN (newdecl) = TREE_ASM_WRITTEN (olddecl);
DECL_COMMON (newdecl) = DECL_COMMON (olddecl);
DECL_ASSEMBLER_NAME (newdecl) = DECL_ASSEMBLER_NAME (olddecl);
/* Don't really know how much of the language-specific
values we should copy from old to new. */
@ -2678,7 +2695,8 @@ pushdecl (x)
/* don't do anything just yet */;
else if (TREE_CODE (t) != TREE_CODE (x))
{
if (TREE_CODE (t) == TYPE_DECL || TREE_CODE (x) == TYPE_DECL)
if ((TREE_CODE (t) == TYPE_DECL && DECL_ARTIFICIAL (t))
|| (TREE_CODE (x) == TYPE_DECL && DECL_ARTIFICIAL (x)))
{
/* We do nothing special here, because C++ does such nasty
things with TYPE_DECLs. Instead, just let the TYPE_DECL
@ -3892,7 +3910,9 @@ lookup_name_real (name, prefer_type, nonclass)
if (got_scope != NULL_TREE)
{
if (got_scope == void_type_node)
if (got_scope == error_mark_node)
return error_mark_node;
else if (got_scope == void_type_node)
val = IDENTIFIER_GLOBAL_VALUE (name);
else if (TREE_CODE (got_scope) == TEMPLATE_TYPE_PARM
/* TFIXME -- don't do this for UPTs in new model. */
@ -4540,14 +4560,29 @@ init_decl_processing ()
sizetype,
endlink)),
BUILT_IN_ALLOCA, "alloca");
#if 0
builtin_function ("alloca",
build_function_type (ptr_type_node,
tree_cons (NULL_TREE,
sizetype,
endlink)),
BUILT_IN_ALLOCA, NULL_PTR);
#endif
/* Define alloca, ffs as builtins.
Declare _exit just to mark it as volatile. */
if (! flag_no_builtin && !flag_no_nonansi_builtin)
{
temp = builtin_function ("alloca",
build_function_type (ptr_type_node,
tree_cons (NULL_TREE,
sizetype,
endlink)),
BUILT_IN_ALLOCA, NULL_PTR);
/* Suppress error if redefined as a non-function. */
DECL_BUILT_IN_NONANSI (temp) = 1;
temp = builtin_function ("ffs", int_ftype_int, BUILT_IN_FFS, NULL_PTR);
/* Suppress error if redefined as a non-function. */
DECL_BUILT_IN_NONANSI (temp) = 1;
temp = builtin_function ("_exit", build_function_type (void_type_node,
int_endlink),
NOT_BUILT_IN, NULL_PTR);
TREE_THIS_VOLATILE (temp) = 1;
TREE_SIDE_EFFECTS (temp) = 1;
/* Suppress error if redefined as a non-function. */
DECL_BUILT_IN_NONANSI (temp) = 1;
}
builtin_function ("__builtin_abs", int_ftype_int,
BUILT_IN_ABS, NULL_PTR);
@ -4647,6 +4682,23 @@ init_decl_processing ()
builtin_function ("strlen", sizet_ftype_string, BUILT_IN_STRLEN, NULL_PTR);
builtin_function ("sin", double_ftype_double, BUILT_IN_SIN, NULL_PTR);
builtin_function ("cos", double_ftype_double, BUILT_IN_COS, NULL_PTR);
/* Declare these functions volatile
to avoid spurious "control drops through" warnings. */
temp = builtin_function ("abort",
build_function_type (void_type_node, endlink),
NOT_BUILT_IN, NULL_PTR);
TREE_THIS_VOLATILE (temp) = 1;
TREE_SIDE_EFFECTS (temp) = 1;
/* Well, these are actually ANSI, but we can't set DECL_BUILT_IN on
them... */
DECL_BUILT_IN_NONANSI (temp) = 1;
temp = builtin_function ("exit", build_function_type (void_type_node,
int_endlink),
NOT_BUILT_IN, NULL_PTR);
TREE_THIS_VOLATILE (temp) = 1;
TREE_SIDE_EFFECTS (temp) = 1;
DECL_BUILT_IN_NONANSI (temp) = 1;
}
#if 0
@ -4708,6 +4760,11 @@ init_decl_processing ()
: signed_wchar_type_node;
record_builtin_type (RID_WCHAR, "__wchar_t", wchar_type_node);
/* Artificial declaration of wchar_t -- can be bashed */
wchar_decl_node = build_decl (TYPE_DECL, get_identifier ("wchar_t"),
wchar_type_node);
pushdecl (wchar_decl_node);
/* This is for wide string constants. */
wchar_array_type_node
= build_array_type (wchar_type_node, array_domain_type);
@ -4758,7 +4815,7 @@ init_decl_processing ()
vtbl_type_node
= build_array_type (vtable_entry_type, NULL_TREE);
layout_type (vtbl_type_node);
vtbl_type_node = c_build_type_variant (vtbl_type_node, 1, 0);
vtbl_type_node = cp_build_type_variant (vtbl_type_node, 1, 0);
record_builtin_type (RID_MAX, NULL_PTR, vtbl_type_node);
/* Simplify life by making a "sigtable_entry_type". Give its
@ -4928,7 +4985,7 @@ init_decl_processing ()
NOT_BUILT_IN);
abort_fndecl
= define_function ("abort",
= define_function ("__pure_virtual",
build_function_type (void_type_node, void_list_node),
NOT_BUILT_IN, 0, 0);
@ -5010,7 +5067,6 @@ shadow_tag (declspecs)
tree declspecs;
{
int found_tag = 0;
int warned = 0;
tree ob_modifier = NULL_TREE;
register tree link;
register enum tree_code code, ok_code = ERROR_MARK;
@ -5023,41 +5079,14 @@ shadow_tag (declspecs)
code = TREE_CODE (value);
if (IS_AGGR_TYPE_CODE (code) || code == ENUMERAL_TYPE)
{
register tree name = TYPE_NAME (value);
my_friendly_assert (TYPE_NAME (value) != NULL_TREE, 261);
if (code == ENUMERAL_TYPE && TYPE_SIZE (value) == 0)
cp_error ("forward declaration of `%#T'", value);
if (name == NULL_TREE)
name = lookup_tag_reverse (value, NULL_TREE);
if (name && TREE_CODE (name) == TYPE_DECL)
name = DECL_NAME (name);
t = lookup_tag (code, name, inner_binding_level, 1);
if (t == NULL_TREE)
{
push_obstacks (&permanent_obstack, &permanent_obstack);
if (IS_AGGR_TYPE_CODE (code))
t = make_lang_type (code);
else
t = make_node (code);
pushtag (name, t, 0);
pop_obstacks ();
ok_code = code;
}
else if (name != NULL_TREE || code == ENUMERAL_TYPE)
ok_code = code;
if (ok_code != ERROR_MARK)
found_tag++;
else
{
if (!warned)
pedwarn ("useless keyword or type name in declaration");
warned = 1;
}
t = value;
ok_code = code;
found_tag++;
}
else if (value == ridpointers[(int) RID_STATIC]
|| value == ridpointers[(int) RID_EXTERN]
@ -5097,32 +5126,9 @@ shadow_tag (declspecs)
cp_error ("`%D' can only be specified for objects and functions",
ob_modifier);
if (ok_code == RECORD_TYPE
&& found_tag == 1
&& TYPE_LANG_SPECIFIC (t)
&& CLASSTYPE_DECLARED_EXCEPTION (t))
{
if (TYPE_SIZE (t))
cp_error ("redeclaration of exception `%T'", t);
else
{
tree ename, decl;
push_obstacks (&permanent_obstack, &permanent_obstack);
pushclass (t, 0);
ename = TYPE_NAME (t);
if (TREE_CODE (ename) == TYPE_DECL)
ename = DECL_NAME (ename);
decl = build_lang_field_decl (VAR_DECL, ename, t);
pop_obstacks ();
}
}
else if (found_tag == 0)
if (found_tag == 0)
pedwarn ("abstract declarator used as declaration");
else if (!warned && found_tag > 1)
else if (found_tag > 1)
pedwarn ("multiple types in one declaration");
}
}
@ -5255,8 +5261,7 @@ start_decl (declarator, declspecs, initialized, raises)
if (interface_unknown && flag_external_templates
&& ! DECL_IN_SYSTEM_HEADER (decl))
warn_if_unknown_interface ();
TREE_PUBLIC (d) = TREE_PUBLIC (decl) =
flag_external_templates && !interface_unknown;
TREE_PUBLIC (d) = TREE_PUBLIC (decl);
TREE_STATIC (d) = TREE_STATIC (decl);
DECL_EXTERNAL (d) = (DECL_EXTERNAL (decl)
&& !(context && !DECL_THIS_EXTERN (decl)));
@ -5714,6 +5719,14 @@ finish_decl (decl, init, asmspec_tree, need_pop)
type = TREE_TYPE (decl);
if (type == error_mark_node)
{
if (current_binding_level == global_binding_level && temporary)
end_temporary_allocation ();
return;
}
was_incomplete = (DECL_SIZE (decl) == NULL_TREE);
/* Take care of TYPE_DECLs up front. */
@ -5808,6 +5821,10 @@ finish_decl (decl, init, asmspec_tree, need_pop)
else if (TREE_CODE (type) == REFERENCE_TYPE
|| (TYPE_LANG_SPECIFIC (type) && IS_SIGNATURE_REFERENCE (type)))
{
if (TREE_STATIC (decl))
make_decl_rtl (decl, NULL_PTR,
current_binding_level == global_binding_level
|| pseudo_global_level_p ());
grok_reference_init (decl, type, init, &cleanup);
init = NULL_TREE;
}
@ -6315,9 +6332,11 @@ finish_decl (decl, init, asmspec_tree, need_pop)
expand_aggr_init (decl, init, 0);
}
/* Set this to 0 so we can tell whether an aggregate
which was initialized was ever used. */
if (TYPE_NEEDS_CONSTRUCTING (type))
/* Set this to 0 so we can tell whether an aggregate which
was initialized was ever used. Don't do this if it has a
destructor, so we don't complain about the 'resource
allocation is initialization' idiom. */
if (TYPE_NEEDS_CONSTRUCTING (type) && cleanup == NULL_TREE)
TREE_USED (decl) = 0;
/* Store the cleanup, if there was one. */
@ -6792,6 +6811,7 @@ grokvardecl (type, declarator, specbits, initialized)
decl = build_lang_field_decl (VAR_DECL, declarator, type);
DECL_CONTEXT (decl) = basetype;
DECL_CLASS_CONTEXT (decl) = basetype;
DECL_ASSEMBLER_NAME (decl) = build_static_name (basetype, declarator);
}
else
decl = build_decl (VAR_DECL, declarator, type);
@ -6857,6 +6877,8 @@ build_ptrmemfunc_type (type)
/* Let the front-end know this is a pointer to member function. */
TYPE_PTRMEMFUNC_FLAG(t) = 1;
/* and not really an aggregate. */
IS_AGGR_TYPE (t) = 0;
fields[0] = build_lang_field_decl (FIELD_DECL, delta_identifier,
delta_type_node);
@ -7091,7 +7113,11 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
dname = decl;
decl = NULL_TREE;
if (IDENTIFIER_OPNAME_P (dname))
if (! IDENTIFIER_OPNAME_P (dname)
/* Linux headers use '__op'. Arrgh. */
|| IDENTIFIER_TYPENAME_P (dname) && ! TREE_TYPE (dname))
name = IDENTIFIER_POINTER (dname);
else
{
if (IDENTIFIER_TYPENAME_P (dname))
{
@ -7102,8 +7128,6 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
}
name = operator_name_string (dname);
}
else
name = IDENTIFIER_POINTER (dname);
break;
case RECORD_TYPE:
@ -7281,7 +7305,7 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
goto found;
}
for (i = (int) RID_FIRST_MODIFIER; i < (int) RID_MAX; i++)
for (i = (int) RID_FIRST_MODIFIER; i <= (int) RID_LAST_MODIFIER; i++)
{
if (ridpointers[i] == id)
{
@ -7521,10 +7545,13 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
volatilep = !! RIDBIT_SETP (RID_VOLATILE, specbits) + TYPE_VOLATILE (type);
staticp = 0;
inlinep = !! RIDBIT_SETP (RID_INLINE, specbits);
#if 0
/* This sort of redundancy is blessed in a footnote to the Sep 94 WP. */
if (constp > 1)
warning ("duplicate `const'");
if (volatilep > 1)
warning ("duplicate `volatile'");
#endif
virtualp = RIDBIT_SETP (RID_VIRTUAL, specbits);
if (RIDBIT_SETP (RID_STATIC, specbits))
@ -7688,17 +7715,13 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
break;
}
if (scanner == IDENTIFIER_AS_LIST (ridpointers [(int) RID_TYPEDEF]))
{
if (previous_declspec)
TREE_CHAIN (previous_declspec)
= IDENTIFIER_AS_LIST (ridpointers [(int) RID_STATIC]);
else
declspecs
= IDENTIFIER_AS_LIST (ridpointers [(int) RID_STATIC]);
}
if (previous_declspec)
TREE_CHAIN (previous_declspec) = TREE_CHAIN (scanner);
else
TREE_VALUE (scanner) = ridpointers[(int) RID_STATIC];
declspecs = TREE_CHAIN (scanner);
declspecs = tree_cons (NULL_TREE, ridpointers[(int) RID_STATIC],
declspecs);
/* In the recursive call to grokdeclarator we need to know
whether we are working on a signature-local typedef. */
@ -7707,6 +7730,9 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
loc_typedecl =
grokdeclarator (declarator, declspecs, FIELD, 0, NULL_TREE);
if (previous_declspec)
TREE_CHAIN (previous_declspec) = scanner;
if (loc_typedecl != error_mark_node)
{
@ -7714,6 +7740,9 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
register int *pi;
TREE_SET_CODE (loc_typedecl, TYPE_DECL);
/* This is the same field as DECL_ARGUMENTS, which is set for
function typedefs by the above grokdeclarator. */
DECL_NESTED_TYPENAME (loc_typedecl) = 0;
pi = (int *) permalloc (sizeof (struct lang_decl_flags));
while (i > 0)
@ -7983,7 +8012,7 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
type = build_cplus_array_type (type, itype);
if (constp || volatilep)
type = c_build_type_variant (type, constp, volatilep);
type = cp_build_type_variant (type, constp, volatilep);
ctype = NULL_TREE;
}
@ -7992,6 +8021,9 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
case CALL_EXPR:
{
tree arg_types;
int funcdecl_p;
tree inner_parms = TREE_OPERAND (declarator, 1);
tree inner_decl = TREE_OPERAND (declarator, 0);
/* Declaring a function type.
Make sure we have a valid type for the function to return. */
@ -8005,7 +8037,7 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
if (constp || volatilep)
{
type = c_build_type_variant (type, constp, volatilep);
type = cp_build_type_variant (type, constp, volatilep);
if (IS_AGGR_TYPE (type))
build_pointer_type (type);
constp = 0;
@ -8026,8 +8058,17 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
type = integer_type_node;
}
if (inner_decl && TREE_CODE (inner_decl) == SCOPE_REF)
inner_decl = TREE_OPERAND (inner_decl, 1);
/* Say it's a definition only for the CALL_EXPR
closest to the identifier. */
funcdecl_p =
inner_decl && TREE_CODE (inner_decl) == IDENTIFIER_NODE;
if (ctype == NULL_TREE
&& decl_context == FIELD
&& funcdecl_p
&& (friendp == 0 || dname == current_class_name))
ctype = current_class_type;
@ -8141,27 +8182,12 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
/* Construct the function type and go to the next
inner layer of declarator. */
{
int funcdef_p;
tree inner_parms = TREE_OPERAND (declarator, 1);
tree inner_decl = TREE_OPERAND (declarator, 0);
declarator = TREE_OPERAND (declarator, 0);
declarator = TREE_OPERAND (declarator, 0);
/* FIXME: This is where default args should be fully
processed. */
if (inner_decl && TREE_CODE (inner_decl) == SCOPE_REF)
inner_decl = TREE_OPERAND (inner_decl, 1);
/* Say it's a definition only for the CALL_EXPR
closest to the identifier. */
funcdef_p =
(inner_decl && TREE_CODE (inner_decl) == IDENTIFIER_NODE)
? funcdef_flag : 0;
/* FIXME: This is where default args should be fully
processed. */
arg_types = grokparms (inner_parms, funcdef_p);
}
arg_types = grokparms (inner_parms, funcdecl_p ? funcdef_flag : 0);
if (declarator)
{
@ -8216,7 +8242,7 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
signature pointer/reference itself. */
if (! IS_SIGNATURE (type))
{
type = c_build_type_variant (type, constp, volatilep);
type = cp_build_type_variant (type, constp, volatilep);
if (IS_AGGR_TYPE (type))
build_pointer_type (type);
constp = 0;
@ -8511,7 +8537,7 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
/* Note that the grammar rejects storage classes
in typenames, fields or parameters. */
if (constp || volatilep)
type = c_build_type_variant (type, constp, volatilep);
type = cp_build_type_variant (type, constp, volatilep);
/* If the user declares "struct {...} foo" then `foo' will have
an anonymous name. Fill that name in now. Nothing can
@ -8600,7 +8626,7 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
if (IS_SIGNATURE (type))
error ("`const' or `volatile' specified with signature type");
else
type = c_build_type_variant (type, constp, volatilep);
type = cp_build_type_variant (type, constp, volatilep);
/* Special case: "friend class foo" looks like a TYPENAME context. */
if (friendp)
@ -8682,7 +8708,7 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
{
/* Transfer const-ness of array into that of type pointed to. */
type = build_pointer_type
(c_build_type_variant (TREE_TYPE (type), constp, volatilep));
(cp_build_type_variant (TREE_TYPE (type), constp, volatilep));
volatilep = constp = 0;
}
else if (TREE_CODE (type) == FUNCTION_TYPE)
@ -8961,14 +8987,10 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
type = build_cplus_method_type (build_type_variant (ctype, constp, volatilep),
TREE_TYPE (type), TYPE_ARG_TYPES (type));
/* Record presence of `static'. In C++, `inline' is like `static'.
Methods of classes should be public, unless we're dropping them
into some other file, so we don't clear TREE_PUBLIC for them. */
/* Record presence of `static'. In C++, `inline' is like `static'. */
publicp
= ((ctype
&& CLASSTYPE_INTERFACE_KNOWN (ctype))
|| !(RIDBIT_SETP (RID_STATIC, specbits)
|| RIDBIT_SETP (RID_INLINE, specbits)));
= !(RIDBIT_SETP (RID_STATIC, specbits)
|| RIDBIT_SETP (RID_INLINE, specbits));
decl = grokfndecl (ctype, type, original_name,
virtualp, flags, quals,
@ -8989,7 +9011,7 @@ grokdeclarator (declarator, declspecs, decl_context, initialized, raises)
declaring main to be static. */
if (TREE_CODE (type) == METHOD_TYPE)
{
cp_error_at ("cannot declare member function `%D' to have static linkage", decl);
cp_pedwarn ("cannot declare member function `%D' to have static linkage", decl);
illegal_static = 1;
}
else if (! ctype
@ -9791,6 +9813,7 @@ xref_defn_tag (code_type_node, name, binfo)
if (! ANON_AGGRNAME_P (name))
{
register tree type_decl = build_decl (TYPE_DECL, ncp, rv);
SET_DECL_ARTIFICIAL (type_decl);
#ifdef DWARF_DEBUGGING_INFO
/* Mark the TYPE_DECL node created just above as a gratuitous one
so that dwarfout.c will know not to generate a TAG_typedef DIE
@ -9939,48 +9962,21 @@ xref_tag (code_type_node, name, binfo, globalize)
}
else
{
extern tree pending_vtables;
struct binding_level *old_b = class_binding_level;
int needs_writing;
ref = make_lang_type (code);
/* A signature type will contain the fields of the signature
table. Therefore, it's not only an interface. */
if (tag_code == signature_type)
{
SET_SIGNATURE (ref);
/* Since a signature type will be turned into the type
of signature tables, it's not only an interface. */
CLASSTYPE_INTERFACE_ONLY (ref) = 0;
CLASSTYPE_INTERFACE_UNKNOWN (ref) = 0;
SET_CLASSTYPE_INTERFACE_KNOWN (ref);
/* A signature doesn't have a vtable. */
CLASSTYPE_VTABLE_NEEDS_WRITING (ref) = 0;
}
/* Record how to set the access of this class's
virtual functions. If write_virtuals == 2 or 3, then
inline virtuals are ``extern inline''. */
switch (write_virtuals)
{
case 0:
case 1:
needs_writing = 1;
break;
case 2:
needs_writing = !! value_member (name, pending_vtables);
break;
case 3:
needs_writing = ! CLASSTYPE_INTERFACE_ONLY (ref)
&& CLASSTYPE_INTERFACE_KNOWN (ref);
break;
default:
needs_writing = 0;
}
/* Signatures don't have a vtable. As long as we don't have default
implementations, they behave as if `write_virtuals' were 3. */
if (tag_code == signature_type)
CLASSTYPE_VTABLE_NEEDS_WRITING (ref) = 0;
else
CLASSTYPE_VTABLE_NEEDS_WRITING (ref) = needs_writing;
#ifdef NONNESTED_CLASSES
/* Class types don't nest the way enums do. */
class_binding_level = (struct binding_level *)0;
@ -10108,6 +10104,7 @@ xref_tag (code_type_node, name, binfo, globalize)
TREE_VIA_PUBLIC (base_binfo) = via_public;
TREE_VIA_PROTECTED (base_binfo) = via_protected;
TREE_VIA_VIRTUAL (base_binfo) = via_virtual;
BINFO_INHERITANCE_CHAIN (base_binfo) = TYPE_BINFO (ref);
SET_CLASSTYPE_MARKED (basetype);
#if 0
@ -10209,6 +10206,7 @@ start_enum (name)
TREE_ADDRESSABLE (b->tags) = 1;
current_local_enum = NULL_TREE;
#if 0 /* This stuff gets cleared in finish_enum anyway. */
if (TYPE_VALUES (enumtype) != NULL_TREE)
/* Completely replace its old definition.
The old enumerators remain defined, however. */
@ -10221,7 +10219,8 @@ start_enum (name)
TYPE_PRECISION (enumtype) = TYPE_PRECISION (integer_type_node);
TYPE_SIZE (enumtype) = NULL_TREE;
fixup_unsigned_type (enumtype);
fixup_signed_type (enumtype);
#endif
/* We copy this value because enumerated type constants
are really of the type of the enumerator, not integer_type_node. */
@ -10241,84 +10240,89 @@ tree
finish_enum (enumtype, values)
register tree enumtype, values;
{
register tree pair, tem;
register HOST_WIDE_INT maxvalue = 0;
register HOST_WIDE_INT minvalue = 0;
register HOST_WIDE_INT i;
register tree minnode, maxnode;
/* Calculate the maximum value of any enumerator in this type. */
if (values)
{
register tree pair;
register tree value = DECL_INITIAL (TREE_VALUE (values));
/* Speed up the main loop by performing some precalculations */
HOST_WIDE_INT value;
TREE_TYPE (TREE_VALUE (values)) = enumtype;
TREE_TYPE (DECL_INITIAL (TREE_VALUE (values))) = enumtype;
TREE_VALUE (values) = DECL_INITIAL (TREE_VALUE (values));
value = TREE_INT_CST_LOW (TREE_VALUE (values));
minvalue = maxvalue = value;
TREE_TYPE (value) = enumtype;
TREE_VALUE (values) = value;
minnode = maxnode = value;
for (pair = TREE_CHAIN (values); pair; pair = TREE_CHAIN (pair))
{
value = DECL_INITIAL (TREE_VALUE (pair));
TREE_TYPE (TREE_VALUE (pair)) = enumtype;
TREE_TYPE (DECL_INITIAL (TREE_VALUE (pair))) = enumtype;
TREE_VALUE (pair) = DECL_INITIAL (TREE_VALUE (pair));
value = TREE_INT_CST_LOW (TREE_VALUE (pair));
if (value > maxvalue)
maxvalue = value;
else if (value < minvalue)
minvalue = value;
TREE_TYPE (value) = enumtype;
TREE_VALUE (pair) = value;
if (tree_int_cst_lt (maxnode, value))
maxnode = value;
else if (tree_int_cst_lt (value, minnode))
minnode = value;
}
}
else
maxnode = minnode = integer_zero_node;
TYPE_VALUES (enumtype) = values;
if (flag_short_enums)
{
/* Determine the precision this type needs, lay it out, and define
it. */
{
int unsignedp = tree_int_cst_sgn (minnode) >= 0;
int lowprec = min_precision (minnode, unsignedp);
int highprec = min_precision (maxnode, unsignedp);
int precision = MAX (lowprec, highprec);
/* First reset precision */
TYPE_PRECISION (enumtype) = 0;
if (! flag_short_enums && precision < TYPE_PRECISION (integer_type_node))
precision = TYPE_PRECISION (integer_type_node);
for (i = maxvalue; i; i >>= 1)
TYPE_PRECISION (enumtype)++;
if (!TYPE_PRECISION (enumtype))
TYPE_PRECISION (enumtype) = 1;
/* Cancel the laying out previously done for the enum type,
so that fixup_unsigned_type will do it over. */
TYPE_SIZE (enumtype) = NULL_TREE;
/*
* The following code is unnecessary since the function
* type_promotes_to deals correctly with promotion of enums of
* underlying unsigned types to signed integer types.
* Moreover, it causes an enum bitfield to require one more bit of
* storage than defined by the ANSI/ISO C++ resolution section r.7.2
* which defines the range of an enum.
*/
#if 0
/* Unlike the C frontend, we prefer signed types. */
if (unsignedp && int_fits_type_p (maxnode, type_for_size (precision, 0)))
unsignedp = 0;
#endif
TYPE_PRECISION (enumtype) = precision;
TYPE_SIZE (enumtype) = NULL_TREE;
if (unsignedp)
fixup_unsigned_type (enumtype);
}
else
fixup_signed_type (enumtype);
}
TREE_INT_CST_LOW (TYPE_MAX_VALUE (enumtype)) = maxvalue;
/* An enum can have some negative values; then it is signed. */
if (minvalue < 0)
{
TREE_INT_CST_LOW (TYPE_MIN_VALUE (enumtype)) = minvalue;
TREE_INT_CST_HIGH (TYPE_MIN_VALUE (enumtype)) = -1;
TREE_UNSIGNED (enumtype) = 0;
}
if (flag_cadillac)
cadillac_finish_enum (enumtype);
/* Fix up all variant types of this enum type. */
for (tem = TYPE_MAIN_VARIANT (enumtype); tem; tem = TYPE_NEXT_VARIANT (tem))
{
TYPE_VALUES (tem) = TYPE_VALUES (enumtype);
TYPE_MIN_VALUE (tem) = TYPE_MIN_VALUE (enumtype);
TYPE_MAX_VALUE (tem) = TYPE_MAX_VALUE (enumtype);
TYPE_SIZE (tem) = TYPE_SIZE (enumtype);
TYPE_MODE (tem) = TYPE_MODE (enumtype);
TYPE_PRECISION (tem) = TYPE_PRECISION (enumtype);
TYPE_ALIGN (tem) = TYPE_ALIGN (enumtype);
TREE_UNSIGNED (tem) = TREE_UNSIGNED (enumtype);
}
{
register tree tem;
/* Fix up all variant types of this enum type. */
for (tem = TYPE_MAIN_VARIANT (enumtype); tem;
tem = TYPE_NEXT_VARIANT (tem))
{
TYPE_VALUES (tem) = TYPE_VALUES (enumtype);
TYPE_MIN_VALUE (tem) = TYPE_MIN_VALUE (enumtype);
TYPE_MAX_VALUE (tem) = TYPE_MAX_VALUE (enumtype);
TYPE_SIZE (tem) = TYPE_SIZE (enumtype);
TYPE_MODE (tem) = TYPE_MODE (enumtype);
TYPE_PRECISION (tem) = TYPE_PRECISION (enumtype);
TYPE_ALIGN (tem) = TYPE_ALIGN (enumtype);
TREE_UNSIGNED (tem) = TREE_UNSIGNED (enumtype);
}
}
/* Finish debugging output for this type. */
#if 0
@ -10678,15 +10682,14 @@ start_function (declspecs, declarator, raises, pre_parsed_p)
/* If this function belongs to an interface, it is public.
If it belongs to someone else's interface, it is also external.
It doesn't matter whether it's inline or not. */
if (interface_unknown == 0)
if (interface_unknown == 0
&& ! TREE_PUBLIC (decl1))
{
TREE_PUBLIC (decl1) = 1;
DECL_EXTERNAL (decl1)
= (interface_only
|| (DECL_INLINE (decl1) && ! flag_implement_inlines));
}
else if (DECL_EXPLICIT_INSTANTIATION (decl1))
/* PUBLIC and EXTERNAL set by do_*_instantiation */;
else
{
/* This is a definition, not a reference.
@ -10695,7 +10698,10 @@ start_function (declspecs, declarator, raises, pre_parsed_p)
defining how to inline. So set DECL_EXTERNAL in that case. */
DECL_EXTERNAL (decl1) = current_extern_inline;
DECL_DEFER_OUTPUT (decl1) = DECL_INLINE (decl1);
DECL_DEFER_OUTPUT (decl1)
= DECL_INLINE (decl1) && ! TREE_PUBLIC (decl1)
&& (DECL_FUNCTION_MEMBER_P (decl1)
|| DECL_TEMPLATE_INSTANTIATION (decl1));
}
if (ctype != NULL_TREE && DECL_STATIC_FUNCTION_P (decl1))
@ -11023,59 +11029,6 @@ store_return_init (return_id, init)
}
}
#if 0
/* Generate code for default X() constructor. */
/* Generate the member-initializer lists for a default X() constructor
   FNDECL of the class currently being defined (current_class_type).
   For each direct base class that has a copy/init constructor, a base
   initializer is pushed onto current_base_init_list; for each ordinary
   non-static data member, a member initializer copying the corresponding
   member of the parameter is pushed onto current_member_init_list.
   NOTE(review): this whole function sits inside `#if 0` — it is dead
   code retained for reference only.  */
static void
build_default_constructor (fndecl)
tree fndecl;
{
/* Number of direct base classes still to process (walked in reverse).  */
int i = CLASSTYPE_N_BASECLASSES (current_class_type);
/* Skip the implicit `this' argument; PARM is then the source object.  */
tree parm = TREE_CHAIN (DECL_ARGUMENTS (fndecl));
tree fields = TYPE_FIELDS (current_class_type);
tree binfos = TYPE_BINFO_BASETYPES (current_class_type);
/* With virtual bases there is an extra hidden argument before the
   real parameter; step over it too.  */
if (TYPE_USES_VIRTUAL_BASECLASSES (current_class_type))
parm = TREE_CHAIN (parm);
parm = DECL_REFERENCE_SLOT (parm);
/* Queue an initializer for every direct base that can be
   copy-initialized.  */
while (--i >= 0)
{
tree basetype = TREE_VEC_ELT (binfos, i);
if (TYPE_HAS_INIT_REF (basetype))
{
tree name = TYPE_NAME (basetype);
/* TYPE_NAME may be a TYPE_DECL; the init list wants the
   IDENTIFIER_NODE.  */
if (TREE_CODE (name) == TYPE_DECL)
name = DECL_NAME (name);
current_base_init_list = tree_cons (name, parm, current_base_init_list);
}
}
/* Queue a memberwise copy for each real, non-static data member.
   NOTE(review): the local `name' below is declared but never used.  */
for (; fields; fields = TREE_CHAIN (fields))
{
tree name, init;
if (TREE_STATIC (fields))
continue;
if (TREE_CODE (fields) != FIELD_DECL)
continue;
if (DECL_NAME (fields))
{
/* Compiler-generated vtable/vbase pointer fields are handled
   elsewhere; skip them here.  */
if (VFIELD_NAME_P (DECL_NAME (fields)))
continue;
if (VBASE_NAME_P (DECL_NAME (fields)))
continue;
/* True for duplicate members. */
if (IDENTIFIER_CLASS_VALUE (DECL_NAME (fields)) != fields)
continue;
}
/* Build `parm.field' as the initializer for this member.  */
init = build (COMPONENT_REF, TREE_TYPE (fields), parm, fields);
init = build_tree_list (NULL_TREE, init);
current_member_init_list
= tree_cons (DECL_NAME (fields), init, current_member_init_list);
}
}
#endif
/* Finish up a function declaration and compile that function
all the way to assembler language output. Then free the storage
@ -11529,20 +11482,19 @@ finish_function (lineno, call_poplevel)
/* So we can tell if jump_optimize sets it to 1. */
can_reach_end = 0;
/* ??? Compensate for Sun brain damage in dealing with data segments
of PIC code. */
if (flag_pic
&& (DECL_CONSTRUCTOR_P (fndecl)
|| DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (fndecl)))
&& CLASSTYPE_NEEDS_VIRTUAL_REINIT (TYPE_METHOD_BASETYPE (fntype)))
DECL_INLINE (fndecl) = 0;
if (DECL_EXTERNAL (fndecl)
/* This function is just along for the ride. If we can make
it inline, that's great. Otherwise, just punt it. */
&& (DECL_INLINE (fndecl) == 0
|| flag_no_inline
|| function_cannot_inline_p (fndecl)))
|| function_cannot_inline_p (fndecl)
/* ??? Compensate for Sun brain damage in dealing with
data segments of PIC code. */
|| (flag_pic
&& (DECL_CONSTRUCTOR_P (fndecl)
|| DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (fndecl)))
&& CLASSTYPE_NEEDS_VIRTUAL_REINIT (TYPE_METHOD_BASETYPE (fntype)))))
{
extern int rtl_dump_and_exit;
int old_rtl_dump_and_exit = rtl_dump_and_exit;
@ -11564,8 +11516,8 @@ finish_function (lineno, call_poplevel)
rest_of_compilation (fndecl);
}
if (DECL_INLINE (fndecl)
&& !TREE_ASM_WRITTEN (fndecl) && DECL_FUNCTION_MEMBER_P (fndecl))
if (DECL_INLINE (fndecl) && !TREE_ASM_WRITTEN (fndecl)
&& DECL_DEFER_OUTPUT (fndecl))
{
mark_inline_for_output (fndecl);
}

View File

@ -84,6 +84,11 @@ int flag_no_asm;
int flag_no_builtin;
/* Nonzero means don't recognize the non-ANSI builtin functions.
-ansi sets this. */
int flag_no_nonansi_builtin;
/* Nonzero means do some things the same way PCC does. */
int flag_traditional;
@ -207,6 +212,12 @@ int warn_nonvdtor;
/* Non-zero means warn when a function is declared extern and later inline. */
int warn_extern_inline;
/* Non-zero means warn when the compiler will reorder code. */
int warn_reorder;
/* Non-zero means warn when synthesis behavior differs from Cfront's. */
int warn_synth;
/* Nonzero means `$' can be in an identifier.
See cccp.c for reasons why this breaks some obscure ANSI C programs. */
@ -320,6 +331,9 @@ int flag_huge_objects;
definitions. */
int flag_conserve_space;
/* Nonzero if we want to obey access control semantics. */
int flag_access_control = 1;
/* Table of language-dependent -f options.
STRING is the option name. VARIABLE is the address of the variable.
ON_VALUE is the value to store in VARIABLE
@ -361,6 +375,8 @@ static struct { char *string; int *variable; int on_value;} lang_f_options[] =
{"conserve-space", &flag_conserve_space, 1},
{"vtable-thunks", &flag_vtable_thunks, 1},
{"short-temps", &flag_short_temps, 1},
{"access-control", &flag_access_control, 1},
{"nonansi-builtins", &flag_no_nonansi_builtin, 0}
};
/* Decode the string P as a language-specific option.
@ -447,6 +463,10 @@ lang_decode_option (p)
flag_alt_external_templates = 0;
found = 1;
}
else if (!strcmp (p, "ansi-overloading"))
{
warning ("-fansi-overloading is no longer meaningful");
}
else for (j = 0;
!found && j < sizeof (lang_f_options) / sizeof (lang_f_options[0]);
j++)
@ -511,6 +531,10 @@ lang_decode_option (p)
warn_nonvdtor = setting;
else if (!strcmp (p, "extern-inline"))
warn_extern_inline = setting;
else if (!strcmp (p, "reorder"))
warn_reorder = setting;
else if (!strcmp (p, "synth"))
warn_synth = setting;
else if (!strcmp (p, "comment"))
; /* cpp handles this one. */
else if (!strcmp (p, "comments"))
@ -537,6 +561,7 @@ lang_decode_option (p)
if (warn_uninitialized != 1)
warn_uninitialized = (setting ? 2 : 0);
warn_template_debugging = setting;
warn_reorder = setting;
}
else if (!strcmp (p, "overloaded-virtual"))
@ -544,7 +569,8 @@ lang_decode_option (p)
else return 0;
}
else if (!strcmp (p, "-ansi"))
flag_no_asm = 1, dollars_in_ident = 0, flag_ansi = 1;
flag_no_asm = 1, dollars_in_ident = 0, flag_no_nonansi_builtin = 1,
flag_ansi = 1;
#ifdef SPEW_DEBUG
/* Undocumented, only ever used when you're invoking cc1plus by hand, since
it's probably safe to assume no sane person would ever want to use this
@ -983,7 +1009,7 @@ grok_array_decl (array_expr, index_exp)
if (TYPE_LANG_SPECIFIC (type)
&& TYPE_OVERLOADS_ARRAY_REF (type))
return build_opfncall (ARRAY_REF, LOOKUP_NORMAL,
array_expr, index_exp, NULL_TREE);
array_expr, index_exp, NULL_TREE);
/* Otherwise, create an ARRAY_REF for a pointer or array type. */
if (TREE_CODE (type) == POINTER_TYPE
@ -1000,17 +1026,14 @@ grok_array_decl (array_expr, index_exp)
|| TREE_CODE (type) == REFERENCE_TYPE)
type = TREE_TYPE (type);
if (TYPE_LANG_SPECIFIC (type)
&& TYPE_OVERLOADS_ARRAY_REF (type))
error ("array expression backwards");
else if (TREE_CODE (type) == POINTER_TYPE
|| TREE_CODE (type) == ARRAY_TYPE)
if (TREE_CODE (type) == POINTER_TYPE
|| TREE_CODE (type) == ARRAY_TYPE)
return build_array_ref (index_exp, array_expr);
else
error("`[]' applied to non-pointer type");
/* We gave an error, so give an error. Huh? */
return error_mark_node;
/* The expression E1[E2] is identical (by definition) to *((E1)+(E2)). */
return build_indirect_ref (build_binary_op (PLUS_EXPR, array_expr,
index_exp, 1),
"array indexing");
}
/* Given the cast expression EXP, checking out its validity. Either return
@ -1057,11 +1080,20 @@ delete_sanity (exp, size, doing_vec, use_global_delete)
return build1 (NOP_EXPR, void_type_node, t);
}
/* You can't delete a pointer to constant. */
if (code == POINTER_TYPE && TREE_READONLY (TREE_TYPE (type)))
if (code == POINTER_TYPE)
{
error ("`const *' cannot be deleted");
return error_mark_node;
/* You can't delete a pointer to constant. */
if (TREE_READONLY (TREE_TYPE (type)))
{
error ("`const *' cannot be deleted");
return error_mark_node;
}
/* You also can't delete functions. */
if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
{
error ("cannot delete a function");
return error_mark_node;
}
}
#if 0
@ -1128,13 +1160,13 @@ check_classfn (ctype, cname, function)
}
if (methods != end)
cp_error ("argument list for `%D' does not match any in class `%T'",
fn_name, ctype);
cp_error ("argument list for `%#D' does not match any in class `%T'",
function, ctype);
else
{
methods = 0;
cp_error ("no `%D' member function declared in class `%T'",
fn_name, ctype);
cp_error ("no `%#D' member function declared in class `%T'",
function, ctype);
}
/* If we did not find the method in the class, add it to
@ -1301,19 +1333,10 @@ grokfield (declarator, declspecs, raises, init, asmspec_tree)
/* current_class_type can be NULL_TREE in case of error. */
if (asmspec == 0 && current_class_type)
{
tree name;
char *buf, *buf2;
buf2 = build_overload_name (current_class_type, 1, 1);
buf = (char *)alloca (IDENTIFIER_LENGTH (DECL_NAME (value))
+ sizeof (STATIC_NAME_FORMAT)
+ strlen (buf2));
sprintf (buf, STATIC_NAME_FORMAT, buf2,
IDENTIFIER_POINTER (DECL_NAME (value)));
name = get_identifier (buf);
TREE_PUBLIC (value) = 1;
DECL_INITIAL (value) = error_mark_node;
DECL_ASSEMBLER_NAME (value) = name;
DECL_ASSEMBLER_NAME (value)
= build_static_name (current_class_type, DECL_NAME (value));
}
pending_statics = perm_tree_cons (NULL_TREE, value, pending_statics);
@ -1366,9 +1389,11 @@ grokfield (declarator, declspecs, raises, init, asmspec_tree)
if (DECL_FRIEND_P (value))
return void_type_node;
#if 0 /* Just because a fn is declared doesn't mean we'll try to define it. */
if (current_function_decl && ! IS_SIGNATURE (current_class_type))
cp_error ("method `%#D' of local class must be defined in class body",
value);
#endif
DECL_IN_AGGR_P (value) = 1;
return value;
@ -2067,9 +2092,13 @@ finish_anon_union (anon_union_decl)
return;
}
while (field)
for (; field; field = TREE_CHAIN (field))
{
tree decl = build_decl (VAR_DECL, DECL_NAME (field), TREE_TYPE (field));
tree decl;
if (TREE_CODE (field) != FIELD_DECL)
continue;
decl = build_decl (VAR_DECL, DECL_NAME (field), TREE_TYPE (field));
/* tell `pushdecl' that this is not tentative. */
DECL_INITIAL (decl) = error_mark_node;
TREE_PUBLIC (decl) = public_p;
@ -2096,12 +2125,19 @@ finish_anon_union (anon_union_decl)
TREE_PURPOSE of the following TREE_LIST. */
elems = tree_cons (NULL_TREE, decl, elems);
TREE_TYPE (elems) = type;
field = TREE_CHAIN (field);
}
if (static_p)
{
make_decl_rtl (main_decl, 0, global_bindings_p ());
DECL_RTL (anon_union_decl) = DECL_RTL (main_decl);
if (main_decl)
{
make_decl_rtl (main_decl, 0, global_bindings_p ());
DECL_RTL (anon_union_decl) = DECL_RTL (main_decl);
}
else
{
warning ("anonymous union with no members");
return;
}
}
/* The following call assumes that there are never any cleanups
@ -2522,6 +2558,37 @@ walk_sigtables (typedecl_fn, vardecl_fn)
}
}
/* Determines the proper settings of TREE_PUBLIC and DECL_EXTERNAL for an
inline function at end-of-file. */
/* Determine the proper settings of TREE_PUBLIC and DECL_EXTERNAL for
   the inline function DECL at end-of-file.  A decl that is already
   public is left untouched; otherwise instantiations and members of
   classes with a known interface may be made public and/or external.  */
void
import_export_inline (decl)
     tree decl;
{
  tree ctype;

  /* Already public: nothing to decide.  */
  if (TREE_PUBLIC (decl))
    return;

  /* If an explicit instantiation doesn't have TREE_PUBLIC set, it was
     done with `extern'.  Implicit instantiations are treated the same
     way when -fno-implicit-templates is in effect.  */
  if (DECL_EXPLICIT_INSTANTIATION (decl)
      || (DECL_IMPLICIT_INSTANTIATION (decl) && ! flag_implicit_templates))
    {
      TREE_PUBLIC (decl) = 1;
      DECL_EXTERNAL (decl) = 1;
      return;
    }

  /* Only member functions of classes whose interface is known can be
     decided here; anything else keeps its current settings.  */
  if (! DECL_FUNCTION_MEMBER_P (decl))
    return;

  ctype = DECL_CLASS_CONTEXT (decl);
  if (! CLASSTYPE_INTERFACE_KNOWN (ctype))
    return;

  TREE_PUBLIC (decl) = 1;
  /* External when the class is interface-only, or when inlines are
     implemented elsewhere (-fno-implement-inlines).  */
  DECL_EXTERNAL (decl)
    = (CLASSTYPE_INTERFACE_ONLY (ctype)
       || (DECL_INLINE (decl) && ! flag_implement_inlines));
}
extern int parse_time, varconst_time;
#define TIMEVAR(VAR, BODY) \
@ -2541,8 +2608,7 @@ finish_file ()
tree fnname;
tree vars = static_aggregates;
int needs_cleaning = 0, needs_messing_up = 0;
build_exception_table ();
int have_exception_handlers = build_exception_table ();
if (flag_detailed_statistics)
dump_tree_statistics ();
@ -2636,7 +2702,7 @@ finish_file ()
mess_up:
/* Must do this while we think we are at the top level. */
vars = nreverse (static_aggregates);
if (vars != NULL_TREE)
if (vars != NULL_TREE || have_exception_handlers)
{
fnname = get_file_function_name ('I');
start_function (void_list_node, build_parse_node (CALL_EXPR, fnname, void_list_node, NULL_TREE), 0, 0);
@ -2648,6 +2714,9 @@ finish_file ()
push_momentary ();
expand_start_bindings (0);
if (have_exception_handlers)
register_exception_table ();
while (vars)
{
tree decl = TREE_VALUE (vars);
@ -2712,7 +2781,6 @@ finish_file ()
}
}
if (IS_AGGR_TYPE (TREE_TYPE (decl))
|| init == 0
|| TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
expand_aggr_init (decl, init, 0);
else if (TREE_CODE (init) == TREE_VEC)
@ -2828,24 +2896,12 @@ finish_file ()
0; don't crash. */
if (TREE_ASM_WRITTEN (decl) || DECL_SAVED_INSNS (decl) == 0)
continue;
if (DECL_FUNCTION_MEMBER_P (decl) && !TREE_PUBLIC (decl))
{
tree ctype = DECL_CLASS_CONTEXT (decl);
if (CLASSTYPE_INTERFACE_KNOWN (ctype))
{
TREE_PUBLIC (decl) = 1;
DECL_EXTERNAL (decl)
= (CLASSTYPE_INTERFACE_ONLY (ctype)
|| (DECL_INLINE (decl) && ! flag_implement_inlines));
}
}
import_export_inline (decl);
if (TREE_PUBLIC (decl)
|| TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
|| flag_keep_inline_functions)
{
if (DECL_EXTERNAL (decl)
|| (DECL_IMPLICIT_INSTANTIATION (decl)
&& ! flag_implicit_templates))
if (DECL_EXTERNAL (decl))
assemble_external (decl);
else
{
@ -2872,9 +2928,7 @@ finish_file ()
if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
&& ! TREE_ASM_WRITTEN (decl))
{
if (DECL_EXTERNAL (decl)
|| (DECL_IMPLICIT_INSTANTIATION (decl)
&& ! flag_implicit_templates))
if (DECL_EXTERNAL (decl))
assemble_external (decl);
else
{

View File

@ -30,6 +30,11 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
typedef char* cp_printer PROTO((HOST_WIDE_INT, int));
extern cp_printer * cp_printers[256];
/* Whether or not we should try to be quiet for errors and warnings; this is
used to avoid being too talkative about problems with tentative choices
when we're computing the conversion costs for a method call. */
int cp_silent = 0;
typedef void errorfn (); /* deliberately vague */
extern char* cp_file_of PROTO((tree));
@ -150,7 +155,8 @@ cp_error (format, arglist)
arglist_dcl
{
extern errorfn error;
cp_thing (error, 0, format, arglist);
if (! cp_silent)
cp_thing (error, 0, format, arglist);
}
void
@ -159,7 +165,8 @@ cp_warning (format, arglist)
arglist_dcl
{
extern errorfn warning;
cp_thing (warning, 0, format, arglist);
if (! cp_silent)
cp_thing (warning, 0, format, arglist);
}
void
@ -168,7 +175,8 @@ cp_pedwarn (format, arglist)
arglist_dcl
{
extern errorfn pedwarn;
cp_thing (pedwarn, 0, format, arglist);
if (! cp_silent)
cp_thing (pedwarn, 0, format, arglist);
}
void
@ -177,7 +185,8 @@ cp_compiler_error (format, arglist)
arglist_dcl
{
extern errorfn compiler_error;
cp_thing (compiler_error, 0, format, arglist);
if (! cp_silent)
cp_thing (compiler_error, 0, format, arglist);
}
void
@ -195,7 +204,8 @@ cp_error_at (format, arglist)
arglist_dcl
{
extern errorfn error_with_file_and_line;
cp_thing (error_with_file_and_line, 1, format, arglist);
if (! cp_silent)
cp_thing (error_with_file_and_line, 1, format, arglist);
}
void
@ -204,7 +214,8 @@ cp_warning_at (format, arglist)
arglist_dcl
{
extern errorfn warning_with_file_and_line;
cp_thing (warning_with_file_and_line, 1, format, arglist);
if (! cp_silent)
cp_thing (warning_with_file_and_line, 1, format, arglist);
}
void
@ -213,5 +224,6 @@ cp_pedwarn_at (format, arglist)
arglist_dcl
{
extern errorfn pedwarn_with_file_and_line;
cp_thing (pedwarn_with_file_and_line, 1, format, arglist);
if (! cp_silent)
cp_thing (pedwarn_with_file_and_line, 1, format, arglist);
}

View File

@ -104,6 +104,8 @@ dump_readonly_or_volatile (t, p)
if (p == before) OB_PUTC (' ');
if (TYPE_READONLY (t))
OB_PUTS ("const");
if (TYPE_READONLY (t) && TYPE_VOLATILE (t))
OB_PUTC (' ');
if (TYPE_VOLATILE (t))
OB_PUTS ("volatile");
if (p == after) OB_PUTC (' ');
@ -569,7 +571,8 @@ dump_decl (t, v)
{
/* Don't say 'typedef class A' */
tree type = TREE_TYPE (t);
if (IS_AGGR_TYPE (type) && ! TYPE_PTRMEMFUNC_P (type)
if (((IS_AGGR_TYPE (type) && ! TYPE_PTRMEMFUNC_P (type))
|| TREE_CODE (type) == ENUMERAL_TYPE)
&& type == TYPE_MAIN_VARIANT (type))
{
dump_type (type, v);
@ -666,21 +669,31 @@ dump_decl (t, v)
case TEMPLATE_DECL:
{
tree args = DECL_TEMPLATE_PARMS (t);
int i, len = TREE_VEC_LENGTH (args);
int i, len = args ? TREE_VEC_LENGTH (args) : 0;
OB_PUTS ("template <");
for (i = 0; i < len; i++)
{
tree arg = TREE_VEC_ELT (args, i);
if (TREE_CODE (arg) == IDENTIFIER_NODE)
tree defval = TREE_PURPOSE (arg);
arg = TREE_VALUE (arg);
if (TREE_CODE (arg) == TYPE_DECL)
{
OB_PUTS ("class ");
OB_PUTID (arg);
OB_PUTID (DECL_NAME (arg));
}
else
dump_decl (arg, 1);
if (defval)
{
OB_PUTS (" = ");
dump_decl (defval, 1);
}
OB_PUTC2 (',', ' ');
}
OB_UNPUT (2);
if (len != 0)
OB_UNPUT (2);
OB_PUTC2 ('>', ' ');
if (DECL_TEMPLATE_IS_CLASS (t))
@ -1219,6 +1232,14 @@ dump_expr (t, nop)
break;
}
case TREE_LIST:
if (TREE_VALUE (t) && TREE_CODE (TREE_VALUE (t)) == FUNCTION_DECL)
{
OB_PUTID (DECL_NAME (TREE_VALUE (t)));
break;
}
/* else fall through */
/* This list is incomplete, but should suffice for now.
It is very important that `sorry' does not call
`report_error_function'. That could cause an infinite loop. */
@ -1331,12 +1352,26 @@ int
cp_line_of (t)
tree t;
{
int line = 0;
if (TREE_CODE (t) == PARM_DECL)
return DECL_SOURCE_LINE (DECL_CONTEXT (t));
else if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
return DECL_SOURCE_LINE (TYPE_NAME (t));
line = DECL_SOURCE_LINE (DECL_CONTEXT (t));
if (TREE_CODE (t) == TYPE_DECL && DECL_ARTIFICIAL (t))
t = TREE_TYPE (t);
if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
{
if (IS_AGGR_TYPE (t))
line = CLASSTYPE_SOURCE_LINE (t);
else
line = DECL_SOURCE_LINE (TYPE_NAME (t));
}
else
return DECL_SOURCE_LINE (t);
line = DECL_SOURCE_LINE (t);
if (line == 0)
return lineno;
return line;
}
char *

View File

@ -58,11 +58,6 @@ sorry_no_eh ()
}
}
void
build_exception_table ()
{
}
void
expand_exception_blocks ()
{
@ -180,29 +175,9 @@ output_exception_table_entry (file, start_label, end_label, eh_label)
{
char label[100];
fprintf (file, "\t%s\t ", ASM_LONG);
if (GET_CODE (start_label) == CODE_LABEL)
{
ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (start_label));
assemble_name (file, label);
}
else if (GET_CODE (start_label) == SYMBOL_REF)
{
fprintf (stderr, "YYYYYYYYYEEEEEEEESSSSSSSSSSSS!!!!!!!!!!\n");
assemble_name (file, XSTR (start_label, 0));
}
putc ('\n', file);
fprintf (file, "\t%s\t ", ASM_LONG);
ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (end_label));
assemble_name (file, label);
putc ('\n', file);
fprintf (file, "\t%s\t ", ASM_LONG);
ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (eh_label));
assemble_name (file, label);
putc ('\n', file);
assemble_integer (start_label, BITS_PER_WORD/BITS_PER_UNIT, 1);
assemble_integer (end_label, BITS_PER_WORD/BITS_PER_UNIT, 1);
assemble_integer (eh_label, BITS_PER_WORD/BITS_PER_UNIT, 1);
putc ('\n', file); /* blank line */
}
@ -1161,13 +1136,12 @@ expand_start_catch_block (declspecs, declarator)
else
type = NULL_TREE;
false_label_rtx = gen_label_rtx ();
push_label_entry (&false_label_stack, false_label_rtx);
/* This is saved for the exception table. */
/* These are saved for the exception table. */
push_rtl_perm ();
false_label_rtx = gen_label_rtx ();
protect_label_rtx = gen_label_rtx ();
pop_rtl_from_perm ();
push_label_entry (&false_label_stack, false_label_rtx);
push_label_entry (&false_label_stack, protect_label_rtx);
if (type)
@ -1436,40 +1410,63 @@ expand_throw (exp)
emit_jump (throw_label);
}
/* output the exception table */
void
build_exception_table ()
{
extern FILE *asm_out_file;
struct ehEntry *entry;
if (! doing_eh (0))
return;
exception_section ();
/* Beginning marker for table. */
fprintf (asm_out_file, " .global ___EXCEPTION_TABLE__\n");
fprintf (asm_out_file, " .align 4\n");
fprintf (asm_out_file, "___EXCEPTION_TABLE__:\n");
fprintf (asm_out_file, " .word 0, 0, 0\n");
while (entry = dequeue_eh_entry (&eh_table_output_queue)) {
output_exception_table_entry (asm_out_file,
entry->start_label, entry->end_label, entry->exception_handler_label);
}
/* Ending marker for table. */
fprintf (asm_out_file, " .global ___EXCEPTION_END__\n");
fprintf (asm_out_file, "___EXCEPTION_END__:\n");
fprintf (asm_out_file, " .word -1, -1, -1\n");
}
/* end of: my-cp-except.c */
#endif
/* Output the exception table.
   Return the number of handlers.

   Drains eh_table_output_queue; for each queued ehEntry an entry of
   (start, end, handler) labels is emitted into the exception section.
   The table is bracketed by the __EXCEPTION_TABLE__ / __EXCEPTION_END__
   markers, each followed by a sentinel word triple.  Nothing at all is
   emitted (and 0 is returned) when the queue is empty, when exception
   handling is disabled, or when TRY_NEW_EH is not defined.  */
int
build_exception_table ()
{
int count = 0;
#ifdef TRY_NEW_EH
extern FILE *asm_out_file;
struct ehEntry *entry;
tree eh_node_decl;
if (! doing_eh (0))
return 0;
/* Note: assignment in the loop condition — loop until the queue is
   empty.  The table header is emitted lazily on the first entry so an
   empty queue produces no output.  */
while (entry = dequeue_eh_entry (&eh_table_output_queue))
{
if (count == 0)
{
exception_section ();
/* Beginning marker for table. */
ASM_OUTPUT_ALIGN (asm_out_file, 2);
ASM_OUTPUT_LABEL (asm_out_file, "__EXCEPTION_TABLE__");
fprintf (asm_out_file, "	.word 0, 0, 0\n");
}
count++;
output_exception_table_entry (asm_out_file,
entry->start_label, entry->end_label,
entry->exception_handler_label);
}
if (count)
{
/* Ending marker for table. */
ASM_OUTPUT_LABEL (asm_out_file, "__EXCEPTION_END__");
fprintf (asm_out_file, "	.word -1, -1, -1\n");
}
#endif /* TRY_NEW_EH */
return count;
}
/* Emit a call to __register_exceptions, passing the address of the
   __EXCEPTION_TABLE__ emitted by build_exception_table, so the runtime
   learns about this translation unit's handlers.  A no-op unless
   TRY_NEW_EH is defined.  Intended to be called from the file-scope
   initialization function (see finish_file).  */
void
register_exception_table ()
{
#ifdef TRY_NEW_EH
emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0,
VOIDmode, 1,
gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"),
Pmode);
#endif /* TRY_NEW_EH */
}
/* Build a throw expression. */
tree
build_throw (e)

View File

@ -148,9 +148,16 @@ cplus_expand_expr (exp, target, tmode, modifier)
{
if (pcc_struct_return)
{
extern int flag_access_control;
int old_ac = flag_access_control;
tree init = build (RTL_EXPR, type, 0, return_target);
TREE_ADDRESSABLE (init) = 1;
flag_access_control = 0;
expand_aggr_init (slot, init, 0);
flag_access_control = old_ac;
if (TYPE_NEEDS_DESTRUCTOR (type))
{
init = build (RTL_EXPR, build_reference_type (type), 0,

View File

@ -217,6 +217,7 @@ perform_member_init (member, name, init, explicit)
expand_expr_stmt (build_modify_expr (decl, INIT_EXPR, init));
}
}
expand_cleanups_to (NULL_TREE);
if (flag_handle_exceptions && TYPE_NEEDS_DESTRUCTOR (type))
cp_warning ("caution, member `%D' may not be destroyed in the presense of an exception during construction", member);
}
@ -226,15 +227,19 @@ static tree
sort_member_init (t)
tree t;
{
extern int warn_reorder;
tree x, member, name, field, init;
tree init_list = NULL_TREE;
tree fields_to_unmark = NULL_TREE;
int found;
int last_pos = 0;
tree last_field;
for (member = TYPE_FIELDS (t); member ; member = TREE_CHAIN (member))
{
int pos;
found = 0;
for (x = current_member_init_list ; x ; x = TREE_CHAIN (x))
for (x = current_member_init_list, pos = 0; x; x = TREE_CHAIN (x), ++pos)
{
/* If we cleared this out, then pay no attention to it. */
if (TREE_PURPOSE (x) == NULL_TREE)
@ -264,6 +269,17 @@ sort_member_init (t)
field);
continue;
}
else
{
if (pos < last_pos && warn_reorder)
{
cp_warning_at ("member initializers for `%#D'", last_field);
cp_warning_at (" and `%#D'", field);
warning (" will be re-ordered to match declaration order");
}
last_pos = pos;
last_field = field;
}
init_list = chainon (init_list,
build_tree_list (name, TREE_VALUE (x)));
@ -500,9 +516,10 @@ emit_base_init (t, immediately)
continue;
member = convert_pointer_to (binfo, current_class_decl);
expand_aggr_init_1 (t_binfo, 0,
expand_aggr_init_1 (binfo, 0,
build_indirect_ref (member, NULL_PTR), init,
BINFO_OFFSET_ZEROP (binfo), LOOKUP_COMPLAIN);
BINFO_OFFSET_ZEROP (binfo), LOOKUP_NORMAL);
expand_cleanups_to (NULL_TREE);
}
if (pass == 0)
@ -568,9 +585,10 @@ emit_base_init (t, immediately)
current_class_decl, BINFO_OFFSET (base_binfo));
ref = build_indirect_ref (base, NULL_PTR);
expand_aggr_init_1 (t_binfo, 0, ref, NULL_TREE,
expand_aggr_init_1 (base_binfo, 0, ref, NULL_TREE,
BINFO_OFFSET_ZEROP (base_binfo),
LOOKUP_COMPLAIN);
LOOKUP_NORMAL);
expand_cleanups_to (NULL_TREE);
}
}
CLEAR_BINFO_BASEINIT_MARKED (base_binfo);
@ -655,11 +673,6 @@ emit_base_init (t, immediately)
current_member_init_list = NULL_TREE;
/* It is possible for the initializers to need cleanups.
Expand those cleanups now that all the initialization
has been done. */
expand_cleanups_to (NULL_TREE);
if (! immediately)
{
extern rtx base_init_insns;
@ -734,6 +747,7 @@ expand_aggr_vbase_init_1 (binfo, exp, addr, init_list)
/* Call constructors, but don't set up vtables. */
expand_aggr_init_1 (binfo, exp, ref, init, 0,
LOOKUP_COMPLAIN|LOOKUP_SPECULATIVELY);
expand_cleanups_to (NULL_TREE);
CLEAR_BINFO_VBASE_INIT_MARKED (binfo);
}
@ -1117,7 +1131,11 @@ expand_aggr_init (exp, init, alias_this)
int was_const_elts = TYPE_READONLY (TREE_TYPE (type));
tree itype = init ? TREE_TYPE (init) : NULL_TREE;
if (was_const_elts)
TREE_TYPE (exp) = TYPE_MAIN_VARIANT (type);
{
TREE_TYPE (exp) = TYPE_MAIN_VARIANT (type);
if (init)
TREE_TYPE (init) = TYPE_MAIN_VARIANT (itype);
}
if (init && TREE_TYPE (init) == NULL_TREE)
{
/* Handle bad initializers like:
@ -1139,7 +1157,8 @@ expand_aggr_init (exp, init, alias_this)
init && comptypes (TREE_TYPE (init), TREE_TYPE (exp), 1));
TREE_READONLY (exp) = was_const;
TREE_TYPE (exp) = type;
if (init) TREE_TYPE (init) = itype;
if (init)
TREE_TYPE (init) = itype;
return;
}
@ -1187,6 +1206,7 @@ expand_default_init (binfo, true_exp, exp, type, init, alias_this, flags)
else if (TREE_CODE (init) == INDIRECT_REF && TREE_HAS_CONSTRUCTOR (init))
{
rval = convert_for_initialization (exp, type, init, 0, 0, 0, 0);
TREE_USED (rval) = 1;
expand_expr_stmt (rval);
return;
}
@ -1392,7 +1412,7 @@ expand_aggr_init_1 (binfo, true_exp, exp, init, alias_this, flags)
separately from the object being initialized. */
if (TREE_CODE (init) == TARGET_EXPR)
{
if (init_type == type)
if (TYPE_MAIN_VARIANT (init_type) == TYPE_MAIN_VARIANT (type))
{
if (TREE_CODE (exp) == VAR_DECL
|| TREE_CODE (exp) == RESULT_DECL)
@ -1966,12 +1986,6 @@ build_offset_ref (cname, name)
name, NULL_TREE, 1);
#endif
fnfields = lookup_fnfields (TYPE_BINFO (type), name, 1);
fields = lookup_field (type, name, 0, 0);
if (fields == error_mark_node || fnfields == error_mark_node)
return error_mark_node;
if (current_class_type == 0
|| get_base_distance (type, current_class_type, 0, &basetypes) == -1)
{
@ -1986,6 +2000,12 @@ build_offset_ref (cname, name)
else
decl = C_C_D;
fnfields = lookup_fnfields (basetypes, name, 1);
fields = lookup_field (basetypes, name, 0, 0);
if (fields == error_mark_node || fnfields == error_mark_node)
return error_mark_node;
/* A lot of this logic is now handled in lookup_field and
lookup_fnfield. */
if (fnfields)
@ -2018,7 +2038,6 @@ build_offset_ref (cname, name)
{
extern int flag_save_memoized_contexts;
/* This does not handle access checking yet. */
if (DECL_CHAIN (t) == NULL_TREE || dtor)
{
enum access_type access;
@ -2287,9 +2306,11 @@ resolve_offset_ref (exp)
{
basetype = TYPE_OFFSET_BASETYPE (TREE_TYPE (member));
addr = convert_pointer_to (basetype, addr);
member = convert (ptr_type_node, build_unary_op (ADDR_EXPR, member, 0));
member = convert (ptrdiff_type_node,
build_unary_op (ADDR_EXPR, member, 0));
return build1 (INDIRECT_REF, type,
build (PLUS_EXPR, ptr_type_node, addr, member));
build (PLUS_EXPR, build_pointer_type (type),
addr, member));
}
else if (TYPE_PTRMEMFUNC_P (TREE_TYPE (member)))
{
@ -3261,7 +3282,7 @@ build_new (placement, decl, init, use_global_new)
build_tree_list (NULL_TREE, rval))));
}
return save_expr (rval);
return rval;
}
/* `expand_vec_init' performs initialization of a vector of aggregate

View File

@ -75,6 +75,12 @@ void yyerror ();
struct obstack inline_text_obstack;
static char *inline_text_firstobj;
/* This obstack is used to hold information about methods to be
synthesized. It should go away when synthesized methods are handled
properly (i.e. only when needed). */
struct obstack synth_obstack;
static char *synth_firstobj;
int end_of_file;
/* Pending language change.
@ -564,6 +570,8 @@ init_lex ()
init_error ();
gcc_obstack_init (&inline_text_obstack);
inline_text_firstobj = (char *) obstack_alloc (&inline_text_obstack, 0);
gcc_obstack_init (&synth_obstack);
synth_firstobj = (char *) obstack_alloc (&synth_obstack, 0);
/* Start it at 0, because check_newline is called at the very beginning
and will increment it to 1. */
@ -960,15 +968,6 @@ set_yydebug (value)
#endif
}
#ifdef SPEW_DEBUG
/* Debugging aid: map the lexer token number VALUE to the printable
   token name from the parser's yytname table.  Only compiled under
   SPEW_DEBUG (see surrounding #ifdef).  */
const char *
debug_yytranslate (value)
int value;
{
return yytname[YYTRANSLATE (value)];
}
#endif
/* Functions and data structures for #pragma interface.
@ -1095,30 +1094,72 @@ set_vardecl_interface_info (prev, vars)
void
do_pending_inlines ()
{
struct pending_inline *prev = 0, *tail;
struct pending_inline *t;
/* Oops, we're still dealing with the last batch. */
if (yychar == PRE_PARSED_FUNCTION_DECL)
return;
/* Reverse the pending inline functions, since
they were cons'd instead of appended. */
for (t = pending_inlines; t; t = tail)
{
t->deja_vu = 1;
tail = t->next;
t->next = prev;
prev = t;
}
/* Reset to zero so that if the inline functions we are currently
processing define inline functions of their own, that is handled
correctly. ??? This hasn't been checked in a while. */
pending_inlines = 0;
{
struct pending_inline *prev = 0, *tail, *bottom = 0;
t = pending_inlines;
pending_inlines = 0;
for (; t; t = tail)
{
tail = t->next;
t->next = prev;
t->deja_vu = 1;
prev = t;
}
/* This kludge should go away when synthesized methods are handled
properly, i.e. only when needed. */
for (t = prev; t; t = t->next)
{
if (t->lineno <= 0)
{
tree f = t->fndecl;
DECL_PENDING_INLINE_INFO (f) = 0;
interface_unknown = t->interface == 1;
interface_only = t->interface == 0;
switch (- t->lineno)
{
case 0: case 1:
build_dtor (f); break;
case 2:
build_default_constructor (f); break;
case 3: case 4:
build_copy_constructor (f); break;
case 5: case 6:
build_assign_ref (f); break;
default:
;
}
if (tail)
tail->next = t->next;
else
prev = t->next;
if (! bottom)
bottom = t;
}
else
tail = t;
}
if (bottom)
{
obstack_free (&synth_obstack, bottom);
extract_interface_info ();
}
t = prev;
}
if (t == 0)
return;
/* Now start processing the first inline function. */
t = prev;
my_friendly_assert ((t->parm_vec == NULL_TREE) == (t->bindings == NULL_TREE),
226);
if (t->parm_vec)
@ -1660,8 +1701,8 @@ reinit_parse_for_block (yychar, obstackp, is_template)
When KIND == 6, build default operator = (X&). */
tree
cons_up_default_function (type, name, fields, kind)
tree type, name, fields;
cons_up_default_function (type, name, kind)
tree type, name;
int kind;
{
extern tree void_list_node;
@ -1685,14 +1726,6 @@ cons_up_default_function (type, name, fields, kind)
case 2:
/* Default constructor. */
args = void_list_node;
{
if (declspecs)
declspecs = decl_tree_cons (NULL_TREE,
ridpointers [(int) RID_INLINE],
declspecs);
else
declspecs = build_decl_list (NULL_TREE, ridpointers [(int) RID_INLINE]);
}
break;
case 3:
@ -1700,16 +1733,12 @@ cons_up_default_function (type, name, fields, kind)
/* Fall through... */
case 4:
/* According to ARM $12.8, the default copy ctor will be declared, but
not defined, unless it's needed. So we mark this as `inline'; that
way, if it's never used it won't be emitted. */
declspecs = build_decl_list (NULL_TREE, ridpointers [(int) RID_INLINE]);
not defined, unless it's needed. */
argtype = build_reference_type (type);
args = tree_cons (NULL_TREE,
build_tree_list (hash_tree_chain (argtype, NULL_TREE),
get_identifier ("_ctor_arg")),
void_list_node);
default_copy_constructor_body (&func_buf, &func_len, type, fields);
break;
case 5:
@ -1717,11 +1746,7 @@ cons_up_default_function (type, name, fields, kind)
/* Fall through... */
case 6:
retref = 1;
declspecs =
decl_tree_cons (NULL_TREE, name,
decl_tree_cons (NULL_TREE,
ridpointers [(int) RID_INLINE],
NULL_TREE));
declspecs = build_decl_list (NULL_TREE, name);
name = ansi_opname [(int) MODIFY_EXPR];
@ -1730,19 +1755,14 @@ cons_up_default_function (type, name, fields, kind)
build_tree_list (hash_tree_chain (argtype, NULL_TREE),
get_identifier ("_ctor_arg")),
void_list_node);
default_assign_ref_body (&func_buf, &func_len, type, fields);
break;
default:
my_friendly_abort (59);
}
if (!func_buf)
{
func_len = 2;
func_buf = obstack_alloc (&inline_text_obstack, func_len);
strcpy (func_buf, "{}");
}
declspecs = decl_tree_cons (NULL_TREE, ridpointers [(int) RID_INLINE],
declspecs);
TREE_PARMLIST (args) = 1;
@ -1751,52 +1771,27 @@ cons_up_default_function (type, name, fields, kind)
if (retref)
declarator = build_parse_node (ADDR_EXPR, declarator);
fn = start_method (declspecs, declarator, NULL_TREE);
fn = grokfield (declarator, declspecs, NULL_TREE, NULL_TREE, NULL_TREE);
}
if (fn == void_type_node)
return fn;
current_base_init_list = NULL_TREE;
current_member_init_list = NULL_TREE;
if (CLASSTYPE_TEMPLATE_INSTANTIATION (type))
SET_DECL_IMPLICIT_INSTANTIATION (fn);
/* This kludge should go away when synthesized methods are handled
properly, i.e. only when needed. */
{
struct pending_inline *t;
t = (struct pending_inline *) obstack_alloc (&inline_text_obstack,
sizeof (struct pending_inline));
t->lineno = lineno;
#if 1
t->filename = input_filename;
#else /* This breaks; why? */
#define MGMSG "(synthetic code at) "
t->filename = obstack_alloc (&inline_text_obstack,
strlen (input_filename) + sizeof (MGMSG) + 1);
strcpy (t->filename, MGMSG);
strcat (t->filename, input_filename);
#endif
t->token = YYEMPTY;
t->token_value = 0;
t->buf = func_buf;
t->len = func_len;
t->can_free = 1;
t->deja_vu = 0;
if (interface_unknown && processing_template_defn && flag_external_templates && ! DECL_IN_SYSTEM_HEADER (fn))
warn_if_unknown_interface ();
t = (struct pending_inline *)
obstack_alloc (&synth_obstack, sizeof (struct pending_inline));
t->lineno = -kind;
t->can_free = 0;
t->interface = (interface_unknown ? 1 : (interface_only ? 0 : 2));
store_pending_inline (fn, t);
if (interface_unknown)
TREE_PUBLIC (fn) = 0;
else
{
TREE_PUBLIC (fn) = 1;
DECL_EXTERNAL (fn) = interface_only;
}
}
finish_method (fn);
#ifdef DEBUG_DEFAULT_FUNCTIONS
{ char *fn_type = NULL;
tree t = name;
@ -1818,14 +1813,13 @@ cons_up_default_function (type, name, fields, kind)
}
#endif /* DEBUG_DEFAULT_FUNCTIONS */
DECL_CLASS_CONTEXT (fn) = TYPE_MAIN_VARIANT (type);
/* Show that this function was generated by the compiler. */
SET_DECL_ARTIFICIAL (fn);
return fn;
}
#if 0
/* Used by default_copy_constructor_body. For the anonymous union
in TYPE, return the member that is at least as large as the rest
of the members, so we can copy it. */
@ -2171,6 +2165,7 @@ default_copy_constructor_body (bufp, lenp, type, fields)
strcpy (*bufp, prologue.object_base);
strcat (*bufp, "{}");
}
#endif
/* Heuristic to tell whether the user is missing a semicolon
after a struct or enum declaration. Emit an error message
@ -2182,17 +2177,17 @@ check_for_missing_semicolon (type)
if (yychar < 0)
yychar = yylex ();
if (yychar > 255
&& yychar != SCSPEC
&& yychar != IDENTIFIER
&& yychar != TYPENAME)
if ((yychar > 255
&& yychar != SCSPEC
&& yychar != IDENTIFIER
&& yychar != TYPENAME)
|| end_of_file)
{
if (ANON_AGGRNAME_P (TYPE_IDENTIFIER (type)))
error ("semicolon missing after %s declaration",
TREE_CODE (type) == ENUMERAL_TYPE ? "enum" : "struct");
else
error ("semicolon missing after declaration of `%s'",
TYPE_NAME_STRING (type));
cp_error ("semicolon missing after declaration of `%T'", type);
shadow_tag (build_tree_list (0, type));
}
/* Could probably also hack cases where class { ... } f (); appears. */
@ -2875,7 +2870,7 @@ check_newline ()
if (c_header_level && --c_header_level == 0)
{
if (entering_c_header)
warning ("Badly nested C headers from preprocessor");
warning ("badly nested C headers from preprocessor");
--pending_lang_change;
}
if (flag_cadillac)
@ -4203,12 +4198,12 @@ real_yylex ()
|| ((result >> (num_bits - 1)) & 1) == 0)
yylval.ttype
= build_int_2 (result & ((unsigned HOST_WIDE_INT) ~0
>> (HOST_BITS_PER_INT - num_bits)),
>> (HOST_BITS_PER_WIDE_INT - num_bits)),
0);
else
yylval.ttype
= build_int_2 (result | ~((unsigned HOST_WIDE_INT) ~0
>> (HOST_BITS_PER_INT - num_bits)),
>> (HOST_BITS_PER_WIDE_INT - num_bits)),
-1);
if (num_chars<=1)
TREE_TYPE (yylval.ttype) = char_type_node;

View File

@ -58,14 +58,16 @@ enum rid
RID_VOLATILE,
RID_FRIEND,
RID_VIRTUAL,
RID_SIGNED,
RID_AUTO,
RID_MUTABLE,
/* This is where grokdeclarator ends its search when setting the specbits. */
RID_PUBLIC,
RID_PRIVATE,
RID_PROTECTED,
RID_SIGNED,
RID_EXCEPTION,
RID_RAISES,
RID_AUTO,
RID_MUTABLE,
RID_TEMPLATE,
RID_SIGNATURE,
/* Before adding enough to get up to 64, the RIDBIT_* macros
@ -76,6 +78,7 @@ enum rid
#define NORID RID_UNUSED
#define RID_FIRST_MODIFIER RID_EXTERN
#define RID_LAST_MODIFIER RID_MUTABLE
/* The type that can represent all values of RIDBIT. */
/* We assume that we can stick in at least 32 bits into this. */

View File

@ -313,6 +313,7 @@ flush_repeats (type)
OB_PUTC ('_');
}
static int numeric_outputed_need_bar;
static void build_overload_identifier ();
static void
@ -463,9 +464,9 @@ build_overload_identifier (name)
icat (nparms);
for (i = 0; i < nparms; i++)
{
tree parm = TREE_VEC_ELT (parmlist, i);
tree parm = TREE_VALUE (TREE_VEC_ELT (parmlist, i));
tree arg = TREE_VEC_ELT (arglist, i);
if (TREE_CODE (parm) == IDENTIFIER_NODE)
if (TREE_CODE (parm) == TYPE_DECL)
{
/* This parameter is a type. */
OB_PUTC ('Z');
@ -476,11 +477,17 @@ build_overload_identifier (name)
/* It's a PARM_DECL. */
build_overload_name (TREE_TYPE (parm), 0, 0);
build_overload_value (parm, arg);
numeric_outputed_need_bar = 1;
}
}
}
else
{
if (numeric_outputed_need_bar)
{
OB_PUTC ('_');
numeric_outputed_need_bar = 0;
}
icat (IDENTIFIER_LENGTH (name));
OB_PUTID (name);
}
@ -772,6 +779,7 @@ build_overload_name (parmtypes, begin, end)
icat (i);
if (i > 9)
OB_PUTC ('_');
numeric_outputed_need_bar = 0;
build_overload_nested_name (TYPE_NAME (parmtype));
}
else
@ -817,6 +825,18 @@ build_overload_name (parmtypes, begin, end)
if (end) OB_FINISH ();
return (char *)obstack_base (&scratch_obstack);
}
tree
build_static_name (basetype, name)
tree basetype, name;
{
char *basename = build_overload_name (basetype, 1, 1);
char *buf = (char *) alloca (IDENTIFIER_LENGTH (name)
+ sizeof (STATIC_NAME_FORMAT)
+ strlen (basename));
sprintf (buf, STATIC_NAME_FORMAT, basename, IDENTIFIER_POINTER (name));
return get_identifier (buf);
}
/* Generate an identifier that encodes the (ANSI) exception TYPE. */
@ -852,18 +872,17 @@ build_decl_overload (dname, parms, for_method)
/* member operators new and delete look like methods at this point. */
if (! for_method && parms != NULL_TREE && TREE_CODE (parms) == TREE_LIST)
{
if (TREE_VALUE (parms) == sizetype
&& TREE_CHAIN (parms) == void_list_node)
if (dname == ansi_opname[(int) DELETE_EXPR])
return get_identifier ("__builtin_delete");
else if (dname == ansi_opname[(int) VEC_DELETE_EXPR])
return get_identifier ("__builtin_vec_delete");
else if (TREE_CHAIN (parms) == void_list_node)
{
if (dname == ansi_opname[(int) NEW_EXPR])
return get_identifier ("__builtin_new");
else if (dname == ansi_opname[(int) VEC_NEW_EXPR])
return get_identifier ("__builtin_vec_new");
}
else if (dname == ansi_opname[(int) DELETE_EXPR])
return get_identifier ("__builtin_delete");
else if (dname == ansi_opname[(int) VEC_DELETE_EXPR])
return get_identifier ("__builtin_vec_delete");
}
OB_INIT ();
@ -901,6 +920,7 @@ build_decl_overload (dname, parms, for_method)
{
ALLOCATE_TYPEVEC (parms);
nofold = 0;
numeric_outputed_need_bar = 0;
if (for_method)
{
build_overload_name (TREE_VALUE (parms), 0, 0);
@ -1461,11 +1481,13 @@ hack_identifier (value, name, yychar)
if (really_overloaded_fn (value))
{
tree t = get_first_fn (value);
while (t)
for (; t; t = DECL_CHAIN (t))
{
if (TREE_CODE (t) == TEMPLATE_DECL)
continue;
assemble_external (t);
TREE_USED (t) = 1;
t = DECL_CHAIN (t);
}
}
else if (TREE_CODE (value) == TREE_LIST)
@ -1946,3 +1968,204 @@ emit_thunk (thunk_fndecl)
decl_printable_name = save_decl_printable_name;
current_function_decl = 0;
}
/* Code for synthesizing methods which have default semantics defined. */
void
build_default_constructor (fndecl)
tree fndecl;
{
start_function (NULL_TREE, fndecl, NULL_TREE, 1);
store_parm_decls ();
setup_vtbl_ptr ();
finish_function (lineno, 0);
}
/* For the anonymous union in TYPE, return the member that is at least as
large as the rest of the members, so we can copy it. */
static tree
largest_union_member (type)
tree type;
{
tree f, type_size = TYPE_SIZE (type);
for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
if (simple_cst_equal (DECL_SIZE (f), type_size))
return f;
/* We should always find one. */
my_friendly_abort (323);
return NULL_TREE;
}
/* Generate code for default X(X&) constructor. */
void
build_copy_constructor (fndecl)
tree fndecl;
{
tree parm = TREE_CHAIN (DECL_ARGUMENTS (fndecl));
tree t;
start_function (NULL_TREE, fndecl, NULL_TREE, 1);
store_parm_decls ();
clear_last_expr ();
push_momentary ();
if (TYPE_USES_VIRTUAL_BASECLASSES (current_class_type))
parm = TREE_CHAIN (parm);
parm = convert_from_reference (parm);
if (! TYPE_HAS_COMPLEX_INIT_REF (current_class_type))
{
t = build (INIT_EXPR, void_type_node, C_C_D, parm);
TREE_SIDE_EFFECTS (t) = 1;
cplus_expand_expr_stmt (t);
}
else
{
tree fields = TYPE_FIELDS (current_class_type);
int n_bases = CLASSTYPE_N_BASECLASSES (current_class_type);
tree binfos = TYPE_BINFO_BASETYPES (current_class_type);
int i;
for (t = CLASSTYPE_VBASECLASSES (current_class_type); t;
t = TREE_CHAIN (t))
{
tree basetype = BINFO_TYPE (t);
tree p = convert (build_reference_type (basetype), parm);
p = convert_from_reference (p);
current_base_init_list = tree_cons (TYPE_NESTED_NAME (basetype),
p, current_base_init_list);
}
for (i = 0; i < n_bases; ++i)
{
tree p, basetype = TREE_VEC_ELT (binfos, i);
if (TREE_VIA_VIRTUAL (basetype))
continue;
basetype = BINFO_TYPE (basetype);
p = convert (build_reference_type (basetype), parm);
p = convert_from_reference (p);
current_base_init_list = tree_cons (TYPE_NESTED_NAME (basetype),
p, current_base_init_list);
}
for (; fields; fields = TREE_CHAIN (fields))
{
tree name, init, t;
if (TREE_CODE (fields) != FIELD_DECL)
continue;
if (DECL_NAME (fields))
{
if (VFIELD_NAME_P (DECL_NAME (fields)))
continue;
if (VBASE_NAME_P (DECL_NAME (fields)))
continue;
/* True for duplicate members. */
if (IDENTIFIER_CLASS_VALUE (DECL_NAME (fields)) != fields)
continue;
}
else if ((t = TREE_TYPE (fields)) != NULL_TREE
&& TREE_CODE (t) == UNION_TYPE
&& ANON_AGGRNAME_P (TYPE_IDENTIFIER (t))
&& TYPE_FIELDS (t) != NULL_TREE)
fields = largest_union_member (t);
else
continue;
init = build (COMPONENT_REF, TREE_TYPE (fields), parm, fields);
init = build_tree_list (NULL_TREE, init);
current_member_init_list
= tree_cons (DECL_NAME (fields), init, current_member_init_list);
}
current_member_init_list = nreverse (current_member_init_list);
setup_vtbl_ptr ();
}
pop_momentary ();
finish_function (lineno, 0);
}
void
build_assign_ref (fndecl)
tree fndecl;
{
tree parm = TREE_CHAIN (DECL_ARGUMENTS (fndecl));
start_function (NULL_TREE, fndecl, NULL_TREE, 1);
store_parm_decls ();
push_momentary ();
parm = convert_from_reference (parm);
if (! TYPE_HAS_COMPLEX_ASSIGN_REF (current_class_type))
{
tree t = build (MODIFY_EXPR, void_type_node, C_C_D, parm);
TREE_SIDE_EFFECTS (t) = 1;
cplus_expand_expr_stmt (t);
}
else
{
tree fields = TYPE_FIELDS (current_class_type);
int n_bases = CLASSTYPE_N_BASECLASSES (current_class_type);
tree binfos = TYPE_BINFO_BASETYPES (current_class_type);
int i;
for (i = 0; i < n_bases; ++i)
{
tree basetype = BINFO_TYPE (TREE_VEC_ELT (binfos, i));
if (TYPE_HAS_ASSIGN_REF (basetype))
{
tree p = convert (build_reference_type (basetype), parm);
p = convert_from_reference (p);
p = build_member_call (TYPE_NESTED_NAME (basetype),
ansi_opname [MODIFY_EXPR],
build_tree_list (NULL_TREE, p));
expand_expr_stmt (p);
}
}
for (; fields; fields = TREE_CHAIN (fields))
{
tree comp, init, t;
if (TREE_CODE (fields) != FIELD_DECL)
continue;
if (DECL_NAME (fields))
{
if (VFIELD_NAME_P (DECL_NAME (fields)))
continue;
if (VBASE_NAME_P (DECL_NAME (fields)))
continue;
/* True for duplicate members. */
if (IDENTIFIER_CLASS_VALUE (DECL_NAME (fields)) != fields)
continue;
}
else if ((t = TREE_TYPE (fields)) != NULL_TREE
&& TREE_CODE (t) == UNION_TYPE
&& ANON_AGGRNAME_P (TYPE_IDENTIFIER (t))
&& TYPE_FIELDS (t) != NULL_TREE)
fields = largest_union_member (t);
else
continue;
comp = build (COMPONENT_REF, TREE_TYPE (fields), C_C_D, fields);
init = build (COMPONENT_REF, TREE_TYPE (fields), parm, fields);
expand_expr_stmt (build_modify_expr (comp, NOP_EXPR, init));
}
}
c_expand_return (C_C_D);
pop_momentary ();
finish_function (lineno, 0);
}
void
build_dtor (fndecl)
tree fndecl;
{
start_function (NULL_TREE, fndecl, NULL_TREE, 1);
store_parm_decls ();
finish_function (lineno, 0);
}

File diff suppressed because it is too large Load Diff

View File

@ -81,18 +81,19 @@ process_template_parm (list, next)
{
tree parm;
tree decl = 0;
tree defval;
int is_type;
parm = next;
my_friendly_assert (TREE_CODE (parm) == TREE_LIST, 259);
is_type = TREE_CODE (TREE_PURPOSE (parm)) == IDENTIFIER_NODE;
defval = TREE_PURPOSE (parm);
parm = TREE_VALUE (parm);
is_type = TREE_PURPOSE (parm) == class_type_node;
if (!is_type)
{
tree tinfo = 0;
parm = TREE_PURPOSE (parm);
my_friendly_assert (TREE_CODE (parm) == TREE_LIST, 260);
parm = TREE_VALUE (parm);
my_friendly_assert (TREE_CODE (TREE_PURPOSE (parm)) == TREE_LIST, 260);
/* is a const-param */
parm = grokdeclarator (TREE_VALUE (next), TREE_PURPOSE (next),
parm = grokdeclarator (TREE_VALUE (parm), TREE_PURPOSE (parm),
PARM, 0, NULL_TREE);
/* A template parameter is not modifiable. */
TREE_READONLY (parm) = 1;
@ -117,11 +118,19 @@ process_template_parm (list, next)
else
{
tree t = make_node (TEMPLATE_TYPE_PARM);
decl = build_decl (TYPE_DECL, TREE_PURPOSE (parm), t);
TYPE_NAME (t) = decl;
TREE_VALUE (parm) = t;
decl = build_decl (TYPE_DECL, TREE_VALUE (parm), t);
TYPE_MAIN_DECL (t) = decl;
parm = decl;
if (defval)
{
if (IDENTIFIER_HAS_TYPE_VALUE (defval))
defval = IDENTIFIER_TYPE_VALUE (defval);
else
defval = TREE_TYPE (IDENTIFIER_GLOBAL_VALUE (defval));
}
}
pushdecl (decl);
parm = build_tree_list (defval, parm);
return chainon (list, parm);
}
@ -135,6 +144,7 @@ end_template_parm_list (parms)
tree parms;
{
int nparms = 0;
int saw_default = 0;
tree saved_parmlist;
tree parm;
for (parm = parms; parm; parm = TREE_CHAIN (parm))
@ -143,13 +153,19 @@ end_template_parm_list (parms)
for (parm = parms, nparms = 0; parm; parm = TREE_CHAIN (parm), nparms++)
{
tree p = parm;
if (TREE_CODE (p) == TREE_LIST)
tree p = TREE_VALUE (parm);
if (TREE_PURPOSE (parm))
saw_default = 1;
else if (saw_default)
{
tree t = TREE_VALUE (p);
TREE_VALUE (p) = NULL_TREE;
p = TREE_PURPOSE (p);
my_friendly_assert (TREE_CODE (p) == IDENTIFIER_NODE, 261);
error ("if a default argument is given for one template parameter");
error ("default arguments must be given for all subsequent");
error ("parameters as well");
}
if (TREE_CODE (p) == TYPE_DECL)
{
tree t = TREE_TYPE (p);
TEMPLATE_TYPE_SET_INFO (t, saved_parmlist, nparms);
}
else
@ -158,7 +174,7 @@ end_template_parm_list (parms)
DECL_INITIAL (p) = NULL_TREE;
TEMPLATE_CONST_SET_INFO (tinfo, saved_parmlist, nparms);
}
TREE_VEC_ELT (saved_parmlist, nparms) = p;
TREE_VEC_ELT (saved_parmlist, nparms) = parm;
}
set_current_level_tags_transparency (1);
processing_template_decl++;
@ -317,7 +333,7 @@ grok_template_type (tvec, type)
{
/* we are here for cases like const T* etc. */
grok_template_type (tvec, &TYPE_MAIN_VARIANT (*type));
*type = c_build_type_variant (TYPE_MAIN_VARIANT (*type),
*type = cp_build_type_variant (TYPE_MAIN_VARIANT (*type),
TYPE_READONLY (*type),
TYPE_VOLATILE (*type));
}
@ -354,51 +370,68 @@ coerce_template_parms (parms, arglist, in_decl)
tree parms, arglist;
tree in_decl;
{
int nparms, i, lost = 0;
int nparms, nargs, i, lost = 0;
tree vec;
if (TREE_CODE (arglist) == TREE_VEC)
nparms = TREE_VEC_LENGTH (arglist);
if (arglist == NULL_TREE)
nargs = 0;
else if (TREE_CODE (arglist) == TREE_VEC)
nargs = TREE_VEC_LENGTH (arglist);
else
nparms = list_length (arglist);
if (nparms != TREE_VEC_LENGTH (parms))
nargs = list_length (arglist);
nparms = TREE_VEC_LENGTH (parms);
if (nargs > nparms
|| (nargs < nparms
&& TREE_PURPOSE (TREE_VEC_ELT (parms, nargs)) == NULL_TREE))
{
error ("incorrect number of parameters (%d, should be %d)",
nparms, TREE_VEC_LENGTH (parms));
nargs, nparms);
if (in_decl)
cp_error_at ("in template expansion for decl `%D'", in_decl);
return error_mark_node;
}
if (TREE_CODE (arglist) == TREE_VEC)
if (arglist && TREE_CODE (arglist) == TREE_VEC)
vec = copy_node (arglist);
else
{
vec = make_tree_vec (nparms);
for (i = 0; i < nparms; i++)
{
tree arg = arglist;
arglist = TREE_CHAIN (arglist);
if (arg == error_mark_node)
lost++;
tree arg;
if (arglist)
{
arg = arglist;
arglist = TREE_CHAIN (arglist);
if (arg == error_mark_node)
lost++;
else
arg = TREE_VALUE (arg);
}
else
arg = TREE_VALUE (arg);
arg = TREE_PURPOSE (TREE_VEC_ELT (parms, i));
TREE_VEC_ELT (vec, i) = arg;
}
}
for (i = 0; i < nparms; i++)
{
tree arg = TREE_VEC_ELT (vec, i);
tree parm = TREE_VEC_ELT (parms, i);
tree parm = TREE_VALUE (TREE_VEC_ELT (parms, i));
tree val = 0;
int is_type, requires_type;
is_type = TREE_CODE_CLASS (TREE_CODE (arg)) == 't';
requires_type = TREE_CODE (parm) == IDENTIFIER_NODE;
requires_type = TREE_CODE (parm) == TYPE_DECL;
if (is_type != requires_type)
{
if (in_decl)
cp_error_at ("type/value mismatch in template parameter list for `%D'", in_decl);
cp_error ("type/value mismatch in template parameter list for `%D'",
in_decl);
lost++;
TREE_VEC_ELT (vec, i) = error_mark_node;
continue;
@ -415,7 +448,7 @@ coerce_template_parms (parms, arglist, in_decl)
{
grok_template_type (vec, &TREE_TYPE (parm));
val = digest_init (TREE_TYPE (parm), arg, (tree *) 0);
if (val == error_mark_node)
;
@ -489,12 +522,13 @@ mangle_class_name_for_template (name, parms, arglist)
my_friendly_assert (nparms == TREE_VEC_LENGTH (arglist), 268);
for (i = 0; i < nparms; i++)
{
tree parm = TREE_VEC_ELT (parms, i), arg = TREE_VEC_ELT (arglist, i);
tree parm = TREE_VALUE (TREE_VEC_ELT (parms, i));
tree arg = TREE_VEC_ELT (arglist, i);
if (i)
ccat (',');
if (TREE_CODE (parm) == IDENTIFIER_NODE)
if (TREE_CODE (parm) == TYPE_DECL)
{
cat (type_as_string (arg, 0));
continue;
@ -573,7 +607,7 @@ lookup_template_class (d1, arglist, in_decl)
}
parmlist = DECL_TEMPLATE_PARMS (template);
arglist = coerce_template_parms (parmlist, arglist, in_decl);
arglist = coerce_template_parms (parmlist, arglist, template);
if (arglist == error_mark_node)
return error_mark_node;
if (uses_template_parms (arglist))
@ -619,11 +653,11 @@ push_template_decls (parmlist, arglist, class_level)
for (i = 0; i < nparms; i++)
{
int requires_type, is_type;
tree parm = TREE_VEC_ELT (parmlist, i);
tree parm = TREE_VALUE (TREE_VEC_ELT (parmlist, i));
tree arg = TREE_VEC_ELT (arglist, i);
tree decl = 0;
requires_type = TREE_CODE (parm) == IDENTIFIER_NODE;
requires_type = TREE_CODE (parm) == TYPE_DECL;
is_type = TREE_CODE_CLASS (TREE_CODE (arg)) == 't';
if (is_type)
{
@ -635,7 +669,7 @@ push_template_decls (parmlist, arglist, class_level)
}
decl = arg;
my_friendly_assert (TREE_CODE_CLASS (TREE_CODE (decl)) == 't', 273);
decl = build_decl (TYPE_DECL, parm, decl);
decl = build_decl (TYPE_DECL, DECL_NAME (parm), decl);
}
else
{
@ -864,23 +898,25 @@ instantiate_member_templates (classname)
&TREE_VEC_ELT (parmvec, 0));
type = IDENTIFIER_TYPE_VALUE (id);
my_friendly_assert (type != 0, 277);
if (CLASSTYPE_INTERFACE_UNKNOWN (type))
if (flag_external_templates)
{
DECL_EXTERNAL (t2) = 0;
TREE_PUBLIC (t2) = 0;
}
else
{
DECL_EXTERNAL (t2) = CLASSTYPE_INTERFACE_ONLY (type);
TREE_PUBLIC (t2) = 1;
if (CLASSTYPE_INTERFACE_UNKNOWN (type))
{
DECL_EXTERNAL (t2) = 0;
TREE_PUBLIC (t2) = 0;
}
else
{
DECL_EXTERNAL (t2) = CLASSTYPE_INTERFACE_ONLY (type);
TREE_PUBLIC (t2) = 1;
}
}
break;
case 1:
/* Failure. */
failure:
cp_error ("type unification error instantiating %T::%D",
classname, tdecl);
cp_error_at ("for template declaration `%D'", tdecl);
cp_error_at ("type unification error instantiating `%D'", tdecl);
cp_error ("while instantiating members of `%T'", classname);
continue /* loop of members */;
default:
@ -1125,7 +1161,7 @@ tsubst (t, args, nargs, in_decl)
&& type != integer_type_node
&& type != void_type_node
&& type != char_type_node)
type = c_build_type_variant (tsubst (type, args, nargs, in_decl),
type = cp_build_type_variant (tsubst (type, args, nargs, in_decl),
TYPE_READONLY (type),
TYPE_VOLATILE (type));
switch (TREE_CODE (t))
@ -1162,7 +1198,7 @@ tsubst (t, args, nargs, in_decl)
tsubst (TYPE_MAX_VALUE (t), args, nargs, in_decl));
case TEMPLATE_TYPE_PARM:
return c_build_type_variant (args[TEMPLATE_TYPE_IDX (t)],
return cp_build_type_variant (args[TEMPLATE_TYPE_IDX (t)],
TYPE_READONLY (t),
TYPE_VOLATILE (t));
@ -1356,9 +1392,10 @@ tsubst (t, args, nargs, in_decl)
if (!got_it)
{
r = build_decl_overload (r, TYPE_VALUES (type),
DECL_CONTEXT (t) != NULL_TREE);
tree a = build_decl_overload (r, TYPE_VALUES (type),
DECL_CONTEXT (t) != NULL_TREE);
r = build_lang_decl (FUNCTION_DECL, r, type);
DECL_ASSEMBLER_NAME (r) = a;
}
else if (DECL_INLINE (r) && DECL_SAVED_INSNS (r))
{
@ -1392,9 +1429,11 @@ tsubst (t, args, nargs, in_decl)
make_decl_rtl (r, NULL_PTR, 1);
DECL_ARGUMENTS (r) = fnargs;
DECL_RESULT (r) = result;
#if 0
if (DECL_CONTEXT (t) == NULL_TREE
|| TREE_CODE_CLASS (TREE_CODE (DECL_CONTEXT (t))) != 't')
push_overloaded_decl_top_level (r, 0);
#endif
return r;
}
@ -1442,7 +1481,7 @@ tsubst (t, args, nargs, in_decl)
{
int len = TREE_VEC_LENGTH (t), need_new = 0, i;
tree *elts = (tree *) alloca (len * sizeof (tree));
bzero (elts, len * sizeof (tree));
bzero ((char *) elts, len * sizeof (tree));
for (i = 0; i < len; i++)
{
@ -1472,7 +1511,7 @@ tsubst (t, args, nargs, in_decl)
r = build_pointer_type (type);
else
r = build_reference_type (type);
r = c_build_type_variant (r, TYPE_READONLY (t), TYPE_VOLATILE (t));
r = cp_build_type_variant (r, TYPE_READONLY (t), TYPE_VOLATILE (t));
/* Will this ever be needed for TYPE_..._TO values? */
layout_type (r);
return r;
@ -1781,6 +1820,7 @@ overload_template_name (id, classlevel)
|| TREE_CODE (t) == UNINSTANTIATED_P_TYPE, 286);
decl = build_decl (TYPE_DECL, template, t);
SET_DECL_ARTIFICIAL (decl);
#if 0 /* fix this later */
/* We don't want to call here if the work has already been done. */
@ -1890,7 +1930,7 @@ reinit_parse_for_template (yychar, d1, d2)
if (!template_info)
{
template_info = (struct template_info *) permalloc (sizeof (struct template_info));
bzero (template_info, sizeof (struct template_info));
bzero ((char *) template_info, sizeof (struct template_info));
DECL_TEMPLATE_INFO (d2) = template_info;
}
template_info->filename = input_filename;
@ -1940,7 +1980,7 @@ type_unification (tparms, targs, parms, args, nsubsts, subr)
my_friendly_assert (ntparms > 0, 292);
if (!subr)
bzero (targs, sizeof (tree) * ntparms);
bzero ((char *) targs, sizeof (tree) * ntparms);
while (parms
&& parms != void_list_node
@ -1968,9 +2008,16 @@ type_unification (tparms, targs, parms, args, nsubsts, subr)
arg = TREE_TYPE (arg);
}
#endif
if (TREE_CODE (arg) == FUNCTION_TYPE
|| TREE_CODE (arg) == METHOD_TYPE)
arg = build_pointer_type (arg);
if (TREE_CODE (parm) != REFERENCE_TYPE)
{
if (TREE_CODE (arg) == FUNCTION_TYPE
|| TREE_CODE (arg) == METHOD_TYPE)
arg = build_pointer_type (arg);
else if (TREE_CODE (arg) == ARRAY_TYPE)
arg = build_pointer_type (TREE_TYPE (arg));
else
arg = TYPE_MAIN_VARIANT (arg);
}
switch (unify (tparms, targs, ntparms, parm, arg, nsubsts))
{
@ -2035,22 +2082,20 @@ unify (tparms, targs, ntparms, parm, arg, nsubsts)
return 1;
}
idx = TEMPLATE_TYPE_IDX (parm);
/* Template type parameters cannot contain cv-quals; i.e.
template <class T> void f (T& a, T& b) will not generate
void f (const int& a, const int& b). */
if (TYPE_READONLY (arg) > TYPE_READONLY (parm)
|| TYPE_VOLATILE (arg) > TYPE_VOLATILE (parm))
return 1;
arg = TYPE_MAIN_VARIANT (arg);
/* Simple cases: Value already set, does match or doesn't. */
if (targs[idx] == arg)
return 0;
else if (targs[idx])
{
if (TYPE_MAIN_VARIANT (targs[idx]) == TYPE_MAIN_VARIANT (arg))
/* allow different parms to have different cv-qualifiers */;
else
return 1;
}
/* Check for mixed types and values. */
if (TREE_CODE (TREE_VEC_ELT (tparms, idx)) != IDENTIFIER_NODE)
return 1;
/* Allow trivial conversions. */
if (TYPE_READONLY (parm) < TYPE_READONLY (arg)
|| TYPE_VOLATILE (parm) < TYPE_VOLATILE (arg))
/* Check for mixed types and values. */
if (TREE_CODE (TREE_VALUE (TREE_VEC_ELT (tparms, idx))) != TYPE_DECL)
return 1;
targs[idx] = arg;
return 0;
@ -2106,7 +2151,10 @@ unify (tparms, targs, ntparms, parm, arg, nsubsts)
case REAL_TYPE:
case INTEGER_TYPE:
if (TREE_CODE (parm) == INTEGER_TYPE && TREE_CODE (arg) == INTEGER_TYPE)
if (TREE_CODE (arg) != TREE_CODE (parm))
return 1;
if (TREE_CODE (parm) == INTEGER_TYPE)
{
if (TYPE_MIN_VALUE (parm) && TYPE_MIN_VALUE (arg)
&& unify (tparms, targs, ntparms,
@ -2377,16 +2425,20 @@ do_function_instantiation (declspecs, declarator, storage)
if (flag_external_templates)
return;
if (DECL_EXPLICIT_INSTANTIATION (result) && ! DECL_EXTERNAL (result))
if (DECL_EXPLICIT_INSTANTIATION (result) && TREE_PUBLIC (result))
return;
SET_DECL_EXPLICIT_INSTANTIATION (result);
TREE_PUBLIC (result) = 1;
if (storage == NULL_TREE)
DECL_EXTERNAL (result) = DECL_INLINE (result) && ! flag_implement_inlines;
{
TREE_PUBLIC (result) = 1;
DECL_EXTERNAL (result) = (DECL_INLINE (result)
&& ! flag_implement_inlines);
TREE_STATIC (result) = ! DECL_EXTERNAL (result);
}
else if (storage == ridpointers[(int) RID_EXTERN])
DECL_EXTERNAL (result) = 1;
;
else
cp_error ("storage class `%D' applied to template instantiation",
storage);
@ -2399,12 +2451,11 @@ do_type_instantiation (name, storage)
tree t = TREE_TYPE (name);
int extern_p;
/* With -fexternal-templates, explicit instantiations are treated the same
as implicit ones. */
if (flag_external_templates)
return;
if (CLASSTYPE_EXPLICIT_INSTANTIATION (t) && ! CLASSTYPE_INTERFACE_ONLY (t))
return;
if (TYPE_SIZE (t) == NULL_TREE)
{
cp_error ("explicit instantiation of `%#T' before definition of template",
@ -2423,30 +2474,62 @@ do_type_instantiation (name, storage)
extern_p = 0;
}
SET_CLASSTYPE_EXPLICIT_INSTANTIATION (t);
CLASSTYPE_VTABLE_NEEDS_WRITING (t) = ! extern_p;
SET_CLASSTYPE_INTERFACE_KNOWN (t);
CLASSTYPE_INTERFACE_ONLY (t) = extern_p;
if (! extern_p)
/* We've already instantiated this. */
if (CLASSTYPE_EXPLICIT_INSTANTIATION (t) && CLASSTYPE_INTERFACE_KNOWN (t))
{
CLASSTYPE_DEBUG_REQUESTED (t) = 1;
TYPE_DECL_SUPPRESS_DEBUG (TYPE_NAME (t)) = 0;
rest_of_type_compilation (t, 1);
if (! extern_p)
cp_pedwarn ("multiple explicit instantiation of `%#T'", t);
return;
}
if (! CLASSTYPE_TEMPLATE_SPECIALIZATION (t))
{
SET_CLASSTYPE_EXPLICIT_INSTANTIATION (t);
if (! extern_p)
{
SET_CLASSTYPE_INTERFACE_KNOWN (t);
CLASSTYPE_INTERFACE_ONLY (t) = 0;
CLASSTYPE_VTABLE_NEEDS_WRITING (t) = 1;
CLASSTYPE_DEBUG_REQUESTED (t) = 1;
TYPE_DECL_SUPPRESS_DEBUG (TYPE_NAME (t)) = 0;
rest_of_type_compilation (t, 1);
}
}
instantiate_member_templates (TYPE_IDENTIFIER (t));
/* this should really be done by instantiate_member_templates */
{
tree method = TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (t), 0);
for (; method; method = TREE_CHAIN (method))
tree tmp = TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (t), 0);
for (; tmp; tmp = TREE_CHAIN (tmp))
{
SET_DECL_EXPLICIT_INSTANTIATION (method);
TREE_PUBLIC (method) = 1;
DECL_EXTERNAL (method)
= (extern_p || (DECL_INLINE (method) && ! flag_implement_inlines));
}
}
if (DECL_TEMPLATE_SPECIALIZATION (tmp)
|| (DECL_USE_TEMPLATE (tmp) == 0
&& CLASSTYPE_TEMPLATE_SPECIALIZATION (t)))
continue;
/* and data member templates, too */
SET_DECL_EXPLICIT_INSTANTIATION (tmp);
if (! extern_p)
{
TREE_PUBLIC (tmp) = 1;
DECL_EXTERNAL (tmp) = (DECL_INLINE (tmp)
&& ! flag_implement_inlines);
TREE_STATIC (tmp) = ! DECL_EXTERNAL (tmp);
}
}
#if 0
for (tmp = TYPE_FIELDS (t); tmp; tmp = TREE_CHAIN (tmp))
{
if (TREE_CODE (tmp) == VAR_DECL)
/* eventually do something */;
}
#endif
for (tmp = CLASSTYPE_TAGS (t); tmp; tmp = TREE_CHAIN (tmp))
if (IS_AGGR_TYPE (TREE_VALUE (tmp)))
do_type_instantiation (TYPE_MAIN_DECL (TREE_VALUE (tmp)), storage);
}
}
tree

View File

@ -236,7 +236,7 @@ my_new_memoized_entry (chain)
struct memoized_entry *p =
(struct memoized_entry *)obstack_alloc (&type_obstack_entries,
sizeof (struct memoized_entry));
bzero (p, sizeof (struct memoized_entry));
bzero ((char *) p, sizeof (struct memoized_entry));
MEMOIZED_CHAIN (p) = chain;
MEMOIZED_UID (p) = ++my_memoized_entry_counter;
return p;
@ -768,6 +768,7 @@ compute_access (basetype_path, field)
tree types;
tree context;
int protected_ok, via_protected;
extern int flag_access_control;
#if 1
/* Replaces static decl above. */
tree previous_scope;
@ -776,6 +777,9 @@ compute_access (basetype_path, field)
((TREE_CODE (field) == FUNCTION_DECL && DECL_STATIC_FUNCTION_P (field))
|| (TREE_CODE (field) != FUNCTION_DECL && TREE_STATIC (field)));
if (! flag_access_control)
return access_public;
/* The field lives in the current class. */
if (BINFO_TYPE (basetype_path) == current_class_type)
return access_public;
@ -1913,6 +1917,10 @@ get_matching_virtual (binfo, fndecl, dtorp)
if (IDENTIFIER_VIRTUAL_P (declarator) == 0)
return NULL_TREE;
baselink = get_virtuals_named_this (binfo);
if (baselink == NULL_TREE)
return NULL_TREE;
drettype = TREE_TYPE (TREE_TYPE (fndecl));
dtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
if (DECL_STATIC_FUNCTION_P (fndecl))
@ -1920,8 +1928,7 @@ get_matching_virtual (binfo, fndecl, dtorp)
else
instptr_type = TREE_TYPE (TREE_VALUE (dtypes));
for (baselink = get_virtuals_named_this (binfo);
baselink; baselink = next_baselink (baselink))
for (; baselink; baselink = next_baselink (baselink))
{
for (tmp = TREE_VALUE (baselink); tmp; tmp = DECL_CHAIN (tmp))
{
@ -1945,7 +1952,7 @@ get_matching_virtual (binfo, fndecl, dtorp)
&& ! comptypes (TREE_TYPE (TREE_TYPE (tmp)), drettype, 1))
{
cp_error ("conflicting return type specified for virtual function `%#D'", fndecl);
cp_error ("overriding definition as `%#D'", tmp);
cp_error_at ("overriding definition as `%#D'", tmp);
SET_IDENTIFIER_ERROR_LOCUS (name, basetype);
}
break;
@ -2697,13 +2704,22 @@ free_mi_matrix ()
/* If we want debug info for a type TYPE, make sure all its base types
are also marked as being potentially interesting. This avoids
the problem of not writing any debug info for intermediate basetypes
that have abstract virtual functions. */
that have abstract virtual functions. Also mark member types. */
void
note_debug_info_needed (type)
tree type;
{
tree field;
dfs_walk (TYPE_BINFO (type), dfs_debug_mark, dfs_debug_unmarkedp);
for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
{
tree ttype;
if (TREE_CODE (field) == FIELD_DECL
&& IS_AGGR_TYPE (ttype = target_type (TREE_TYPE (field)))
&& dfs_debug_unmarkedp (TYPE_BINFO (ttype)))
note_debug_info_needed (ttype);
}
}
/* Subroutines of push_class_decls (). */

View File

@ -177,7 +177,7 @@ build_signature_pointer_or_reference_type (to_type, constp, volatilep, refp)
}
else
{
tree sig_tbl_type = c_build_type_variant (to_type, 1, 0);
tree sig_tbl_type = cp_build_type_variant (to_type, 1, 0);
sptr = build_lang_field_decl (FIELD_DECL,
get_identifier (SIGNATURE_SPTR_NAME),

View File

@ -202,8 +202,10 @@ shift_tokens (n)
* sizeof (struct token));
/* This move does not rely on the system being able to handle
overlapping moves. */
bcopy (nth_token (0), tmp, old_token_count * sizeof (struct token));
bcopy (tmp, nth_token (n), old_token_count * sizeof (struct token));
bcopy ((char *) nth_token (0), tmp,
old_token_count * sizeof (struct token));
bcopy (tmp, (char *) nth_token (n),
old_token_count * sizeof (struct token));
}
first_token = 0;
}

View File

@ -35,66 +35,75 @@ int
lvalue_p (ref)
tree ref;
{
register enum tree_code code = TREE_CODE (ref);
if (! language_lvalue_valid (ref))
return 0;
if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
return 1;
if (language_lvalue_valid (ref))
if (ref == current_class_decl && flag_this_is_variable <= 0)
return 0;
switch (TREE_CODE (ref))
{
if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
/* preincrements and predecrements are valid lvals, provided
what they refer to are valid lvals. */
case PREINCREMENT_EXPR:
case PREDECREMENT_EXPR:
case COMPONENT_REF:
case SAVE_EXPR:
return lvalue_p (TREE_OPERAND (ref, 0));
case STRING_CST:
return 1;
case VAR_DECL:
if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
&& DECL_LANG_SPECIFIC (ref)
&& DECL_IN_AGGR_P (ref))
return 0;
case INDIRECT_REF:
case ARRAY_REF:
case PARM_DECL:
case RESULT_DECL:
case ERROR_MARK:
if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
&& TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
return 1;
switch (code)
{
/* preincrements and predecrements are valid lvals, provided
what they refer to are valid lvals. */
case PREINCREMENT_EXPR:
case PREDECREMENT_EXPR:
case COMPONENT_REF:
case SAVE_EXPR:
return lvalue_p (TREE_OPERAND (ref, 0));
break;
case STRING_CST:
return 1;
case WITH_CLEANUP_EXPR:
return lvalue_p (TREE_OPERAND (ref, 0));
case VAR_DECL:
if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
&& DECL_LANG_SPECIFIC (ref)
&& DECL_IN_AGGR_P (ref))
return 0;
case INDIRECT_REF:
case ARRAY_REF:
case PARM_DECL:
case RESULT_DECL:
case ERROR_MARK:
if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
&& TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
return 1;
break;
case TARGET_EXPR:
return 1;
case TARGET_EXPR:
case WITH_CLEANUP_EXPR:
return 1;
case CALL_EXPR:
if (TREE_ADDRESSABLE (TREE_TYPE (ref)))
return 1;
break;
/* A currently unresolved scope ref. */
case SCOPE_REF:
my_friendly_abort (103);
case OFFSET_REF:
if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
return 1;
return lvalue_p (TREE_OPERAND (ref, 0))
&& lvalue_p (TREE_OPERAND (ref, 1));
break;
/* A currently unresolved scope ref. */
case SCOPE_REF:
my_friendly_abort (103);
case OFFSET_REF:
if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
return 1;
return lvalue_p (TREE_OPERAND (ref, 0))
&& lvalue_p (TREE_OPERAND (ref, 1));
break;
case COND_EXPR:
return (lvalue_p (TREE_OPERAND (ref, 1))
&& lvalue_p (TREE_OPERAND (ref, 2)));
case COND_EXPR:
return (lvalue_p (TREE_OPERAND (ref, 1))
&& lvalue_p (TREE_OPERAND (ref, 2)));
case MODIFY_EXPR:
return 1;
case MODIFY_EXPR:
return 1;
case COMPOUND_EXPR:
return lvalue_p (TREE_OPERAND (ref, 1));
}
case COMPOUND_EXPR:
return lvalue_p (TREE_OPERAND (ref, 1));
}
return 0;
}
@ -221,12 +230,15 @@ break_out_calls (exp)
return exp;
case 'd': /* A decl node */
#if 0 /* This is bogus. jason 9/21/94 */
t1 = break_out_calls (DECL_INITIAL (exp));
if (t1 != DECL_INITIAL (exp))
{
exp = copy_node (exp);
DECL_INITIAL (exp) = t1;
}
#endif
return exp;
case 'b': /* A block node */
@ -379,6 +391,40 @@ build_cplus_array_type (elt_type, index_type)
return t;
}
/* Make a variant type in the proper way for C/C++, propagating qualifiers
down to the element type of an array.
TYPE is the type to qualify; CONSTP / VOLATILEP are nonzero to request
the `const' / `volatile' qualifier.  Returns the qualified variant.
Unlike plain build_type_variant, for arrays the qualifiers are pushed
down onto the element type (C++ semantics) by rebuilding the array
over the qualified element type.  */
tree
cp_build_type_variant (type, constp, volatilep)
tree type;
int constp, volatilep;
{
if (TREE_CODE (type) == ARRAY_TYPE)
{
/* Allocate the rebuilt array on the same obstack as the main
variant so its lifetime matches the original type's.  */
tree real_main_variant = TYPE_MAIN_VARIANT (type);
push_obstacks (TYPE_OBSTACK (real_main_variant),
TYPE_OBSTACK (real_main_variant));
/* Recurse: qualify the element type, then rebuild the array type
over it, preserving the original index domain.  */
type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
constp, volatilep),
TYPE_DOMAIN (type));
/* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
make a copy. (TYPE might have come from the hash table and
REAL_MAIN_VARIANT might be in some function's obstack.) */
if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
{
type = copy_node (type);
/* The copy must not share pointer/reference caches with the
node it was copied from.  */
TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
}
TYPE_MAIN_VARIANT (type) = real_main_variant;
pop_obstacks ();
}
/* Scalars (and the rebuilt array) get their variant the usual way.  */
return build_type_variant (type, constp, volatilep);
}
/* Add OFFSET to all base types of T.
OFFSET, which is a type offset, is number of bytes.
@ -1184,50 +1230,6 @@ virtual_member (elem, list)
return rval;
}
/* Return the offset (as an INTEGER_CST) for ELEM in LIST.
INITIAL_OFFSET is the value to add to the offset that ELEM's
binfo entry in LIST provides.
Returns NULL if ELEM does not have a binfo value in LIST.
LIST is a TREE_CHAIN of binfos (presumably the virtual-base list of
some class -- cannot confirm the caller from here).  */
tree
virtual_offset (elem, list, initial_offset)
tree elem;
tree list;
tree initial_offset;
{
tree vb, offset;
tree rval, nval;
/* First pass: a direct entry for ELEM in LIST wins outright.  */
for (vb = list; vb; vb = TREE_CHAIN (vb))
if (elem == BINFO_TYPE (vb))
return size_binop (PLUS_EXPR, initial_offset, BINFO_OFFSET (vb));
rval = 0;
/* Second pass: search each list entry's base types for ELEM.  Note
INITIAL_OFFSET is not added on this path -- the offset used is the
containing entry's BINFO_OFFSET plus the found binfo's offset.  */
for (vb = list; vb; vb = TREE_CHAIN (vb))
{
tree binfos = BINFO_BASETYPES (vb);
int i;
if (binfos == NULL_TREE)
continue;
for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
{
nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
if (nval)
{
/* Two matches at different offsets would be ambiguous;
abort (error 105) rather than pick one arbitrarily.  */
if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
my_friendly_abort (105);
offset = BINFO_OFFSET (vb);
rval = nval;
}
}
}
if (rval == NULL_TREE)
return rval;
return size_binop (PLUS_EXPR, offset, BINFO_OFFSET (rval));
}
void
debug_binfo (elem)
tree elem;
@ -1661,6 +1663,31 @@ make_deep_copy (t)
TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
return t;
case POINTER_TYPE:
return build_pointer_type (make_deep_copy (TREE_TYPE (t)));
case REFERENCE_TYPE:
return build_reference_type (make_deep_copy (TREE_TYPE (t)));
case FUNCTION_TYPE:
return build_function_type (make_deep_copy (TREE_TYPE (t)),
make_deep_copy (TYPE_ARG_TYPES (t)));
case ARRAY_TYPE:
return build_array_type (make_deep_copy (TREE_TYPE (t)),
make_deep_copy (TYPE_DOMAIN (t)));
case OFFSET_TYPE:
return build_offset_type (make_deep_copy (TYPE_OFFSET_BASETYPE (t)),
make_deep_copy (TREE_TYPE (t)));
case METHOD_TYPE:
return build_method_type
(make_deep_copy (TYPE_METHOD_BASETYPE (t)),
build_function_type
(make_deep_copy (TREE_TYPE (t)),
make_deep_copy (TREE_CHAIN (TYPE_ARG_TYPES (t)))));
case RECORD_TYPE:
if (TYPE_PTRMEMFUNC_P (t))
return build_ptrmemfunc_type
(make_deep_copy (TYPE_PTRMEMFUNC_FN_TYPE (t)));
/* else fall through */
/* This list is incomplete, but should suffice for now.
It is very important that `sorry' does not call
`report_error_function'. That could cause an infinite loop. */
@ -1738,7 +1765,7 @@ tree
array_type_nelts_top (type)
tree type;
{
return fold (build (PLUS_EXPR, integer_type_node,
return fold (build (PLUS_EXPR, sizetype,
array_type_nelts (type),
integer_one_node));
}
@ -1756,7 +1783,7 @@ array_type_nelts_total (type)
while (TREE_CODE (type) == ARRAY_TYPE)
{
tree n = array_type_nelts_top (type);
sz = fold (build (MULT_EXPR, integer_type_node, sz, n));
sz = fold (build (MULT_EXPR, sizetype, sz, n));
type = TREE_TYPE (type);
}
return sz;

View File

@ -161,7 +161,7 @@ qualify_type (type, like)
int constflag = TYPE_READONLY (type) || TYPE_READONLY (like);
int volflag = TYPE_VOLATILE (type) || TYPE_VOLATILE (like);
/* @@ Must do member pointers here. */
return c_build_type_variant (type, constflag, volflag);
return cp_build_type_variant (type, constflag, volflag);
}
/* Return the common type of two parameter lists.
@ -372,7 +372,7 @@ common_type (t1, t2)
= TYPE_READONLY (TREE_TYPE (t1)) || TYPE_READONLY (TREE_TYPE (t2));
int volatilep
= TYPE_VOLATILE (TREE_TYPE (t1)) || TYPE_VOLATILE (TREE_TYPE (t2));
target = c_build_type_variant (target, constp, volatilep);
target = cp_build_type_variant (target, constp, volatilep);
if (code1 == POINTER_TYPE)
t1 = build_pointer_type (target);
else
@ -734,8 +734,6 @@ comp_target_types (ttl, ttr, nptrs)
ttr = TYPE_MAIN_VARIANT (ttr);
if (ttl == ttr)
return 1;
if (TREE_CODE (ttr) == TEMPLATE_TYPE_PARM)
return 1;
if (TREE_CODE (ttr) != TREE_CODE (ttl))
return 0;
@ -813,12 +811,14 @@ common_base_type (tt1, tt2)
if (UNIQUELY_DERIVED_FROM_P (tt2, tt1))
return tt2;
#if 0
/* If they share a virtual baseclass, that's good enough. */
for (tmp = CLASSTYPE_VBASECLASSES (tt1); tmp; tmp = TREE_CHAIN (tmp))
{
if (binfo_member (BINFO_TYPE (tmp), CLASSTYPE_VBASECLASSES (tt2)))
return BINFO_TYPE (tmp);
}
#endif
/* Otherwise, try to find a unique baseclass of TT1
that is shared by TT2, and follow that down. */
@ -904,6 +904,8 @@ compparms (parms1, parms2, strict)
return t2 == void_list_node && TREE_PURPOSE (t1);
return TREE_PURPOSE (t1) || TREE_PURPOSE (t2);
}
#if 0
/* Default parms are not part of the type of a function. */
if (strict != 3 && TREE_PURPOSE (t1) && TREE_PURPOSE (t2))
{
int cmp = simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
@ -912,6 +914,7 @@ compparms (parms1, parms2, strict)
if (cmp == 0)
return 0;
}
#endif
t1 = TREE_CHAIN (t1);
t2 = TREE_CHAIN (t2);
@ -959,8 +962,6 @@ comp_target_parms (parms1, parms2, strict)
p2 = TREE_VALUE (t2);
if (p1 == p2)
continue;
if (TREE_CODE (p2) == TEMPLATE_TYPE_PARM)
continue;
if ((TREE_CODE (p1) == POINTER_TYPE && TREE_CODE (p2) == POINTER_TYPE)
|| (TREE_CODE (p1) == REFERENCE_TYPE && TREE_CODE (p2) == REFERENCE_TYPE))
@ -970,9 +971,6 @@ comp_target_parms (parms1, parms2, strict)
== TYPE_MAIN_VARIANT (TREE_TYPE (p2))))
continue;
if (TREE_CODE (TREE_TYPE (p2)) == TEMPLATE_TYPE_PARM)
continue;
/* The following is wrong for contravariance,
but many programs depend on it. */
if (TREE_TYPE (p1) == void_type_node)
@ -1308,7 +1306,7 @@ default_conversion (exp)
if (INTEGRAL_CODE_P (code))
{
tree t = type_promotes_to (type);
if (t != TYPE_MAIN_VARIANT (type))
if (t != type)
return convert (t, exp);
}
if (flag_traditional
@ -1380,7 +1378,7 @@ default_conversion (exp)
restype = TREE_TYPE (type);
if (TYPE_READONLY (type) || TYPE_VOLATILE (type)
|| constp || volatilep)
restype = c_build_type_variant (restype,
restype = cp_build_type_variant (restype,
TYPE_READONLY (type) || constp,
TYPE_VOLATILE (type) || volatilep);
ptrtype = build_pointer_type (restype);
@ -1410,8 +1408,19 @@ tree
build_object_ref (datum, basetype, field)
tree datum, basetype, field;
{
tree dtype;
if (datum == error_mark_node)
return error_mark_node;
dtype = TREE_TYPE (datum);
if (TREE_CODE (dtype) == REFERENCE_TYPE)
dtype = TREE_TYPE (dtype);
if (! IS_AGGR_TYPE_CODE (TREE_CODE (dtype)))
{
cp_error ("request for member `%T::%D' in expression of non-aggregate type `%T'",
basetype, field, dtype);
return error_mark_node;
}
else if (IS_SIGNATURE (IDENTIFIER_TYPE_VALUE (basetype)))
{
warning ("signature name in scope resolution ignored");
@ -1524,8 +1533,7 @@ build_component_ref (datum, component, basetype_path, protect)
register tree field = NULL;
register tree ref;
/* If DATUM is a COMPOUND_EXPR or COND_EXPR, move our reference inside it
unless we are not to support things not strictly ANSI. */
/* If DATUM is a COMPOUND_EXPR or COND_EXPR, move our reference inside it. */
switch (TREE_CODE (datum))
{
case COMPOUND_EXPR:
@ -1697,7 +1705,8 @@ build_component_ref (datum, component, basetype_path, protect)
datum = build_indirect_ref (addr, NULL_PTR);
my_friendly_assert (datum != error_mark_node, 311);
}
ref = build (COMPONENT_REF, TREE_TYPE (field), break_out_cleanups (datum), field);
ref = fold (build (COMPONENT_REF, TREE_TYPE (field),
break_out_cleanups (datum), field));
if (TREE_READONLY (datum) || TREE_READONLY (field))
TREE_READONLY (ref) = 1;
@ -2519,13 +2528,15 @@ convert_arguments (return_loc, typelist, values, fndecl, flags)
&& (type == 0 || TREE_CODE (type) != REFERENCE_TYPE))
val = TREE_OPERAND (val, 0);
if ((type == 0 || TREE_CODE (type) != REFERENCE_TYPE)
&& (TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE
if (type == 0 || TREE_CODE (type) != REFERENCE_TYPE)
{
if (TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE
|| TREE_CODE (TREE_TYPE (val)) == FUNCTION_TYPE
|| TREE_CODE (TREE_TYPE (val)) == METHOD_TYPE))
val = default_conversion (val);
|| TREE_CODE (TREE_TYPE (val)) == METHOD_TYPE)
val = default_conversion (val);
val = require_complete_type (val);
val = require_complete_type (val);
}
if (val == error_mark_node)
continue;
@ -2544,7 +2555,8 @@ convert_arguments (return_loc, typelist, values, fndecl, flags)
}
else
{
#ifdef PROMOTE_PROTOTYPES
#if 0 && defined (PROMOTE_PROTOTYPES)
/* This breaks user-defined conversions. */
/* Rather than truncating and then reextending,
convert directly to int, if that's the type we will want. */
if (! flag_traditional
@ -2607,7 +2619,7 @@ convert_arguments (return_loc, typelist, values, fndecl, flags)
/* See if there are default arguments that can be used */
if (TREE_PURPOSE (typetail))
{
while (typetail != void_list_node)
for (; typetail != void_list_node; ++i)
{
tree type = TREE_VALUE (typetail);
tree val = TREE_PURPOSE (typetail);
@ -3247,6 +3259,7 @@ build_binary_op_nodefault (code, orig_op0, orig_op1, error_code)
case GE_EXPR:
case LT_EXPR:
case GT_EXPR:
result_type = bool_type_node;
if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE)
&& (code1 == INTEGER_TYPE || code1 == REAL_TYPE))
short_compare = 1;
@ -3295,7 +3308,8 @@ build_binary_op_nodefault (code, orig_op0, orig_op1, error_code)
warning ("comparison between pointer and integer");
op0 = convert (TREE_TYPE (op1), op0);
}
result_type = bool_type_node;
else
result_type = 0;
converted = 1;
break;
}
@ -3717,7 +3731,7 @@ build_component_addr (arg, argtype, msg)
}
else
/* This conversion is harmless. */
rval = convert (argtype, rval);
rval = convert_force (argtype, rval);
if (! integer_zerop (DECL_FIELD_BITPOS (field)))
{
@ -4088,42 +4102,16 @@ build_unary_op (code, xarg, noconvert)
if (TREE_CODE (arg) == TREE_LIST)
{
/* Look at methods with only this name. */
if (TREE_CODE (TREE_VALUE (arg)) == FUNCTION_DECL)
{
tree targ = TREE_VALUE (arg);
/* If this function is unique, or it is a unique
constructor, we can take its address easily. */
if (DECL_CHAIN (targ) == NULL_TREE
|| (DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (targ))
&& DECL_CHAIN (DECL_CHAIN (targ)) == NULL_TREE))
{
if (DECL_CHAIN (targ))
targ = DECL_CHAIN (targ);
if (DECL_CLASS_CONTEXT (targ))
targ = build (OFFSET_REF, TREE_TYPE (targ), C_C_D, targ);
val = unary_complex_lvalue (ADDR_EXPR, targ);
if (val)
return val;
}
/* This possible setting of TREE_CONSTANT is what makes it possible
with an initializer list to emit the entire thing in the data
section, rather than a run-time initialization. */
arg = build1 (ADDR_EXPR, unknown_type_node, arg);
if (staticp (targ))
TREE_CONSTANT (arg) = 1;
return arg;
}
if (TREE_CODE (TREE_VALUE (arg)) == FUNCTION_DECL
&& DECL_CHAIN (TREE_VALUE (arg)) == NULL_TREE)
/* Unique overloaded non-member function. */
return build_unary_op (ADDR_EXPR, TREE_VALUE (arg), 0);
if (TREE_CHAIN (arg) == NULL_TREE
&& TREE_CODE (TREE_VALUE (arg)) == TREE_LIST
&& DECL_CHAIN (TREE_VALUE (TREE_VALUE (arg))) == NULL_TREE)
{
/* Unique overloaded member function. */
return build_unary_op (ADDR_EXPR, TREE_VALUE (TREE_VALUE (arg)), 0);
}
/* Unique overloaded member function. */
return build_unary_op (ADDR_EXPR, TREE_VALUE (TREE_VALUE (arg)),
0);
return build1 (ADDR_EXPR, unknown_type_node, arg);
}
@ -4165,7 +4153,7 @@ build_unary_op (code, xarg, noconvert)
|| TREE_CODE_CLASS (TREE_CODE (arg)) == 'r')
{
if (TREE_READONLY (arg) || TREE_THIS_VOLATILE (arg))
argtype = c_build_type_variant (argtype,
argtype = cp_build_type_variant (argtype,
TREE_READONLY (arg),
TREE_THIS_VOLATILE (arg));
}
@ -4573,7 +4561,7 @@ build_conditional_expr (ifexp, op1, op2)
else if (TREE_READONLY_DECL_P (op2))
op2 = decl_constant_value (op2);
if (type1 != type2)
type1 = c_build_type_variant
type1 = cp_build_type_variant
(type1,
TREE_READONLY (op1) || TREE_READONLY (op2),
TREE_THIS_VOLATILE (op1) || TREE_THIS_VOLATILE (op2));
@ -4622,7 +4610,7 @@ build_conditional_expr (ifexp, op1, op2)
if (type1 == type2)
result_type = type1;
else
result_type = c_build_type_variant
result_type = cp_build_type_variant
(type1,
TREE_READONLY (op1) || TREE_READONLY (op2),
TREE_THIS_VOLATILE (op1) || TREE_THIS_VOLATILE (op2));
@ -4669,11 +4657,20 @@ build_conditional_expr (ifexp, op1, op2)
{
if (result_type == error_mark_node)
{
message_2_types (error, "common base type of types `%s' and `%s' is ambiguous",
TREE_TYPE (type1), TREE_TYPE (type2));
cp_error ("common base type of types `%T' and `%T' is ambiguous",
TREE_TYPE (type1), TREE_TYPE (type2));
result_type = ptr_type_node;
}
else result_type = TYPE_POINTER_TO (result_type);
else
{
if (pedantic
&& result_type != TREE_TYPE (type1)
&& result_type != TREE_TYPE (type2))
cp_pedwarn ("`%T' and `%T' converted to `%T *' in conditional expression",
type1, type2, result_type);
result_type = TYPE_POINTER_TO (result_type);
}
}
else
{
@ -4939,7 +4936,7 @@ build_c_cast (type, expr)
value = TREE_VALUE (value);
if (TREE_CODE (type) == VOID_TYPE)
value = build1 (NOP_EXPR, type, value);
value = build1 (CONVERT_EXPR, type, value);
else if (TREE_TYPE (value) == NULL_TREE
|| type_unknown_p (value))
{
@ -5002,6 +4999,9 @@ build_c_cast (type, expr)
warning ("cast to pointer from integer of different size");
#endif
if (TREE_READONLY_DECL_P (value))
value = decl_constant_value (value);
ovalue = value;
value = convert_force (type, value);
@ -5326,13 +5326,13 @@ build_modify_expr (lhs, modifycode, rhs)
tree olhstype = lhstype;
tree olhs = lhs;
/* Types that aren't fully specified cannot be used in assignments. */
lhs = require_complete_type (lhs);
/* Avoid duplicate error messages from operands that had errors. */
if (TREE_CODE (lhs) == ERROR_MARK || TREE_CODE (rhs) == ERROR_MARK)
return error_mark_node;
/* Types that aren't fully specified cannot be used in assignments. */
lhs = require_complete_type (lhs);
/* Decide early if we are going to protect RHS from GC
before assigning it to LHS. */
if (type_needs_gc_entry (TREE_TYPE (rhs))
@ -5388,9 +5388,9 @@ build_modify_expr (lhs, modifycode, rhs)
so the code to compute it is only emitted once. */
tree cond
= build_conditional_expr (TREE_OPERAND (lhs, 0),
build_modify_expr (TREE_OPERAND (lhs, 1),
build_modify_expr (convert (TREE_TYPE (lhs), TREE_OPERAND (lhs, 1)),
modifycode, rhs),
build_modify_expr (TREE_OPERAND (lhs, 2),
build_modify_expr (convert (TREE_TYPE (lhs), TREE_OPERAND (lhs, 2)),
modifycode, rhs));
if (TREE_CODE (cond) == ERROR_MARK)
return cond;
@ -5763,10 +5763,12 @@ build_modify_expr (lhs, modifycode, rhs)
if (TREE_CODE (lhstype) == ARRAY_TYPE)
{
int from_array;
/* Allow array assignment in compiler-generated code. */
if ((pedantic || flag_ansi)
&& ! DECL_ARTIFICIAL (current_function_decl))
pedwarn ("ANSI C++ forbids assignment between arrays");
pedwarn ("ANSI C++ forbids assignment of arrays");
/* Have to wrap this in RTL_EXPR for two cases:
in base or member initialization and if we
@ -5782,8 +5784,10 @@ build_modify_expr (lhs, modifycode, rhs)
/* As a matter of principle, `start_sequence' should do this. */
emit_note (0, -1);
from_array = TREE_CODE (TREE_TYPE (newrhs)) == ARRAY_TYPE
? 1 + (modifycode != INIT_EXPR): 0;
expand_vec_init (lhs, lhs, array_type_nelts (lhstype), newrhs,
1 + (modifycode != INIT_EXPR));
from_array);
do_pending_stack_adjust ();
@ -6101,12 +6105,15 @@ build_ptrmemfunc (type, pfn, force)
return digest_init (TYPE_GET_PTRMEMFUNC_TYPE (type), u, (tree*)0);
}
if (TREE_CODE (pfn) == TREE_LIST)
if (TREE_CODE (pfn) == TREE_LIST
|| (TREE_CODE (pfn) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (pfn, 0)) == TREE_LIST))
{
pfn = instantiate_type (type, pfn, 1);
if (pfn == error_mark_node)
return error_mark_node;
pfn = build_unary_op (ADDR_EXPR, pfn, 0);
if (TREE_CODE (pfn) != ADDR_EXPR)
pfn = build_unary_op (ADDR_EXPR, pfn, 0);
}
/* Allow pointer to member conversions here. */
@ -6445,7 +6452,8 @@ convert_for_assignment (type, rhs, errtype, fndecl, parmnum)
add_quals = 1;
left_const &= TYPE_READONLY (ttl);
if (TREE_CODE (ttl) != POINTER_TYPE)
if (TREE_CODE (ttl) != POINTER_TYPE
|| TREE_CODE (ttr) != POINTER_TYPE)
break;
}
unsigned_parity = TREE_UNSIGNED (ttl) - TREE_UNSIGNED (ttr);
@ -6561,10 +6569,11 @@ convert_for_assignment (type, rhs, errtype, fndecl, parmnum)
}
return null_pointer_node;
}
else if (codel == INTEGER_TYPE
else if ((codel == INTEGER_TYPE || codel == BOOLEAN_TYPE)
&& (coder == POINTER_TYPE
|| (coder == RECORD_TYPE
&& (IS_SIGNATURE_POINTER (rhstype)
|| TYPE_PTRMEMFUNC_FLAG (rhstype)
|| IS_SIGNATURE_REFERENCE (rhstype)))))
{
if (fndecl)
@ -6717,7 +6726,8 @@ convert_for_initialization (exp, type, rhs, flags, errtype, fndecl, parmnum)
&& (IS_SIGNATURE_POINTER (type) || IS_SIGNATURE_REFERENCE (type)))
return build_signature_pointer_constructor (type, rhs);
if (IS_AGGR_TYPE (type) && TYPE_NEEDS_CONSTRUCTING (type))
if (IS_AGGR_TYPE (type)
&& (TYPE_NEEDS_CONSTRUCTING (type) || TREE_HAS_CONSTRUCTOR (rhs)))
{
if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (rhstype))
{
@ -6973,8 +6983,11 @@ c_expand_return (retval)
while (TREE_CODE (whats_returned) == NEW_EXPR
|| TREE_CODE (whats_returned) == TARGET_EXPR
|| TREE_CODE (whats_returned) == WITH_CLEANUP_EXPR)
/* Get the target. */
whats_returned = TREE_OPERAND (whats_returned, 0);
{
/* Get the target. */
whats_returned = TREE_OPERAND (whats_returned, 0);
warning ("returning reference to temporary");
}
}
if (TREE_CODE (whats_returned) == VAR_DECL && DECL_NAME (whats_returned))
@ -7028,6 +7041,7 @@ c_expand_return (retval)
&& TREE_CODE (TREE_OPERAND (retval, 0)) == TARGET_EXPR)
retval = TREE_OPERAND (retval, 0);
expand_aggr_init (result, retval, 0);
expand_cleanups_to (NULL_TREE);
DECL_INITIAL (result) = NULL_TREE;
retval = 0;
}
@ -7046,6 +7060,7 @@ c_expand_return (retval)
&& any_pending_cleanups (1))
{
retval = get_temp_regvar (valtype, retval);
expand_cleanups_to (NULL_TREE);
use_temp = obey_regdecls;
result = 0;
}
@ -7071,7 +7086,10 @@ c_expand_return (retval)
{
/* Everything's great--RETVAL is in RESULT. */
if (original_result_rtx)
store_expr (result, original_result_rtx, 0);
{
store_expr (result, original_result_rtx, 0);
expand_cleanups_to (NULL_TREE);
}
else if (retval && retval != result)
{
/* Clear this out so the later call to decl_function_context
@ -7081,6 +7099,9 @@ c_expand_return (retval)
/* Here is where we finally get RETVAL into RESULT.
`expand_return' does the magic of protecting
RESULT from cleanups. */
retval = build1 (CLEANUP_POINT_EXPR, TREE_TYPE (result), retval);
/* This part _must_ come second, because expand_return looks for
the INIT_EXPR as the toplevel node only. :-( */
retval = build (INIT_EXPR, TREE_TYPE (result), result, retval);
TREE_SIDE_EFFECTS (retval) = 1;
expand_return (retval);
@ -7190,7 +7211,8 @@ c_expand_start_case (exp)
exp = index;
}
expand_start_case (1, exp, type, "switch statement");
expand_start_case (1, build1 (CLEANUP_POINT_EXPR, TREE_TYPE (exp), exp),
type, "switch statement");
return exp;
}

View File

@ -329,7 +329,7 @@ ack (s, v, v2)
silly. So instead, we just do the equivalent of a call to fatal in the
same situation (call exit). */
/* First used: 0 (reserved), Last used: 360. Free: */
/* First used: 0 (reserved), Last used: 360. Free: 261. */
static int abortcount = 0;
@ -599,6 +599,7 @@ store_init_value (decl, init)
))
return value;
#if 0 /* No, that's C. jason 9/19/94 */
else
{
if (pedantic && TREE_CODE (value) == CONSTRUCTOR
@ -613,6 +614,7 @@ store_init_value (decl, init)
pedwarn ("ANSI C++ forbids non-constant aggregate initializer expressions");
}
}
#endif
DECL_INITIAL (decl) = value;
return NULL_TREE;
}
@ -631,7 +633,7 @@ digest_init (type, init, tail)
tree type, init, *tail;
{
enum tree_code code = TREE_CODE (type);
tree element = 0;
tree element = NULL_TREE;
tree old_tail_contents;
/* Nonzero if INIT is a braced grouping, which comes in as a CONSTRUCTOR
tree node which has no TREE_TYPE. */
@ -659,8 +661,9 @@ digest_init (type, init, tail)
if (init && TYPE_PTRMEMFUNC_P (type)
&& ((TREE_CODE (init) == ADDR_EXPR
&& TREE_CODE (TREE_TYPE (init)) == POINTER_TYPE
&& TREE_CODE (TREE_TYPE (TREE_TYPE (init))) == METHOD_TYPE)
&& ((TREE_CODE (TREE_TYPE (init)) == POINTER_TYPE
&& TREE_CODE (TREE_TYPE (TREE_TYPE (init))) == METHOD_TYPE)
|| TREE_CODE (TREE_OPERAND (init, 0)) == TREE_LIST))
|| TREE_CODE (init) == TREE_LIST
|| integer_zerop (init)
|| (TREE_TYPE (init) && TYPE_PTRMEMFUNC_P (TREE_TYPE (init)))))
@ -903,7 +906,7 @@ process_init_constructor (type, init, elts)
{
error ("non-empty initializer for array of empty elements");
/* Just ignore what we were supposed to use. */
tail1 = 0;
tail1 = NULL_TREE;
}
tail = tail1;
}
@ -1151,7 +1154,9 @@ build_scoped_ref (datum, types)
if (TREE_CODE (types) == SCOPE_REF)
{
/* We have some work to do. */
struct type_chain { tree type; struct type_chain *next; } *chain = 0, *head = 0, scratch;
struct type_chain
{ tree type; struct type_chain *next; }
*chain = NULL, *head = NULL, scratch;
ref = build_unary_op (ADDR_EXPR, datum, 0);
while (TREE_CODE (types) == SCOPE_REF)
{

File diff suppressed because it is too large Load Diff

View File

@ -145,14 +145,14 @@ bc_sym_write (file)
{
fprintf (file, "\n\t.comm ");
prsym (file, s->name);
fprintf (file, ", %d\n", s->val);
fprintf (file, ", %lu\n", s->val);
}
}
else if (s->common)
{
fprintf (file, "\n\t.lcomm ");
prsym (file, s->name);
fprintf (file, ", %d\n", s->val);
fprintf (file, ", %lu\n", s->val);
}
}
}

View File

@ -253,6 +253,19 @@ decl_attributes (decl, attributes)
else
warning_with_decl (decl, "`const' attribute ignored");
}
else if (TREE_VALUE (a) == get_identifier ("transparent_union"))
{
if (TREE_CODE (decl) == PARM_DECL
&& TREE_CODE (type) == UNION_TYPE
&& TYPE_MODE (type) == DECL_MODE (TYPE_FIELDS (type)))
DECL_TRANSPARENT_UNION (decl) = 1;
else if (TREE_CODE (decl) == TYPE_DECL
&& TREE_CODE (type) == UNION_TYPE
&& TYPE_MODE (type) == DECL_MODE (TYPE_FIELDS (type)))
TYPE_TRANSPARENT_UNION (type) = 1;
else
warning_with_decl (decl, "`transparent_union' attribute ignored");
}
else if (TREE_CODE (name) != TREE_LIST)
{
#ifdef VALID_MACHINE_ATTRIBUTE
@ -518,7 +531,7 @@ static format_char_info print_char_table[] = {
{ "s", 1, T_C, NULL, T_W, NULL, NULL, "-wp" },
{ "S", 1, T_W, NULL, NULL, NULL, NULL, "-wp" },
{ "p", 1, T_V, NULL, NULL, NULL, NULL, "-w" },
{ "n", 1, T_I, T_S, T_L, NULL, NULL, "" },
{ "n", 1, T_I, T_S, T_L, T_LL, NULL, "" },
{ NULL }
};
@ -531,7 +544,7 @@ static format_char_info scan_char_table[] = {
{ "C", 1, T_W, NULL, NULL, NULL, NULL, "*" },
{ "S", 1, T_W, NULL, NULL, NULL, NULL, "*" },
{ "p", 2, T_V, NULL, NULL, NULL, NULL, "*" },
{ "n", 1, T_I, T_S, T_L, NULL, NULL, "" },
{ "n", 1, T_I, T_S, T_L, T_LL, NULL, "" },
{ NULL }
};
@ -1340,6 +1353,37 @@ type_for_mode (mode, unsignedp)
return 0;
}
/* Return the minimum number of bits needed to represent VALUE in a
signed or unsigned type, UNSIGNEDP says which.
VALUE is an INTEGER_CST (a two-word HIGH/LOW constant); the result
counts magnitude bits plus, for the signed case, one sign bit.  */
int
min_precision (value, unsignedp)
tree value;
int unsignedp;
{
int log;
/* If the value is negative, compute its negative minus 1. The latter
adjustment is because the absolute value of the largest negative value
is one larger than the largest positive value. This is equivalent to
a bit-wise negation, so use that operation instead. */
if (tree_int_cst_sgn (value) < 0)
value = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (value), value));
/* Return the number of bits needed, taking into account the fact
that we need one more bit for a signed than unsigned type. */
if (integer_zerop (value))
log = 0;
else if (TREE_INT_CST_HIGH (value) != 0)
/* Value spills into the high word: count the high word's bits on
top of a full low word.  */
log = HOST_BITS_PER_WIDE_INT + floor_log2 (TREE_INT_CST_HIGH (value));
else
log = floor_log2 (TREE_INT_CST_LOW (value));
/* floor_log2 gives the highest set bit's index; +1 for the bit count,
plus one sign bit when a signed representation is requested.  */
return log + 1 + ! unsignedp;
}
/* Print an error message for invalid operands to arith operation CODE.
NOP_EXPR is used as a special case (see truthvalue_conversion). */
@ -1451,9 +1495,11 @@ shorten_compare (op0_ptr, op1_ptr, restype_ptr, rescode_ptr)
real2 = TREE_CODE (TREE_TYPE (primop1)) == REAL_TYPE;
/* If first arg is constant, swap the args (changing operation
so value is preserved), for canonicalization. */
so value is preserved), for canonicalization. Don't do this if
the second arg is 0. */
if (TREE_CONSTANT (primop0))
if (TREE_CONSTANT (primop0)
&& ! integer_zerop (primop1) && ! real_zerop (primop1))
{
register tree tem = primop0;
register int temi = unsignedp0;
@ -1692,13 +1738,23 @@ shorten_compare (op0_ptr, op1_ptr, restype_ptr, rescode_ptr)
switch (code)
{
case GE_EXPR:
if (extra_warnings)
/* All unsigned values are >= 0, so we warn if extra warnings
are requested. However, if OP0 is a constant that is
>= 0, the signedness of the comparison isn't an issue,
so suppress the warning. */
if (extra_warnings
&& ! (TREE_CODE (primop0) == INTEGER_CST
&& ! TREE_OVERFLOW (convert (signed_type (type),
primop0))))
warning ("unsigned value >= 0 is always 1");
value = integer_one_node;
break;
case LT_EXPR:
if (extra_warnings)
if (extra_warnings
&& ! (TREE_CODE (primop0) == INTEGER_CST
&& ! TREE_OVERFLOW (convert (signed_type (type),
primop0))))
warning ("unsigned value < 0 is always 0");
value = integer_zero_node;
}

View File

@ -443,7 +443,8 @@ emit_call_1 (funexp, funtype, stack_size, struct_value_size, next_arg_reg,
{
if (!already_popped)
CALL_INSN_FUNCTION_USAGE (call_insn) =
gen_rtx (EXPR_LIST, CLOBBER, stack_pointer_rtx,
gen_rtx (EXPR_LIST, VOIDmode,
gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx),
CALL_INSN_FUNCTION_USAGE (call_insn));
stack_size -= RETURN_POPS_ARGS (funtype, stack_size);
stack_size_rtx = GEN_INT (stack_size);
@ -667,9 +668,6 @@ expand_call (exp, target, ignore)
{
struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
if (struct_value_size < 0)
abort ();
if (target && GET_CODE (target) == MEM)
structure_value_addr = XEXP (target, 0);
else
@ -680,6 +678,9 @@ expand_call (exp, target, ignore)
specified. If we were to allocate space on the stack here,
we would have no way of knowing when to free it. */
if (struct_value_size < 0)
abort ();
structure_value_addr
= XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0);
target = 0;
@ -901,15 +902,18 @@ expand_call (exp, target, ignore)
as if it were an extra parameter. */
if (structure_value_addr && struct_value_rtx == 0)
{
/* If structure_value_addr is a REG other than
virtual_outgoing_args_rtx, we can use always use it. If it
is not a REG, we must always copy it into a register.
If it is virtual_outgoing_args_rtx, we must copy it to another
register in some cases. */
rtx temp = (GET_CODE (structure_value_addr) != REG
#ifdef ACCUMULATE_OUTGOING_ARGS
/* If the stack will be adjusted, make sure the structure address
does not refer to virtual_outgoing_args_rtx. */
rtx temp = (stack_arg_under_construction
? copy_addr_to_reg (structure_value_addr)
: force_reg (Pmode, structure_value_addr));
#else
rtx temp = force_reg (Pmode, structure_value_addr);
|| (stack_arg_under_construction
&& structure_value_addr == virtual_outgoing_args_rtx)
#endif
? copy_addr_to_reg (structure_value_addr)
: structure_value_addr);
actparms
= tree_cons (error_mark_node,
@ -979,6 +983,12 @@ expand_call (exp, target, ignore)
if (type == error_mark_node || TYPE_SIZE (type) == 0)
args[i].tree_value = integer_zero_node, type = integer_type_node;
/* If TYPE is a transparent union, pass things the way we would
pass the first field of the union. We have already verified that
the modes are the same. */
if (TYPE_TRANSPARENT_UNION (type))
type = TREE_TYPE (TYPE_FIELDS (type));
/* Decide where to pass this arg.
args[i].reg is nonzero if all or part is passed in registers.
@ -1343,98 +1353,103 @@ expand_call (exp, target, ignore)
}
argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
}
else if (must_preallocate)
else
{
/* Note that we must go through the motions of allocating an argument
block even if the size is zero because we may be storing args
in the area reserved for register arguments, which may be part of
the stack frame. */
int needed = args_size.constant;
#ifdef ACCUMULATE_OUTGOING_ARGS
/* Store the maximum argument space used. It will be pushed by the
prologue.
Since the stack pointer will never be pushed, it is possible for
the evaluation of a parm to clobber something we have already
written to the stack. Since most function calls on RISC machines
do not use the stack, this is uncommon, but must work correctly.
Therefore, we save any area of the stack that was already written
and that we are using. Here we set up to do this by making a new
stack usage map from the old one. The actual save will be done
by store_one_arg.
Another approach might be to try to reorder the argument
evaluations to avoid this conflicting stack usage. */
prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow checking). */
if (needed > current_function_outgoing_args_size)
current_function_outgoing_args_size = needed;
if (must_preallocate)
{
#ifdef ACCUMULATE_OUTGOING_ARGS
/* Since the stack pointer will never be pushed, it is possible for
the evaluation of a parm to clobber something we have already
written to the stack. Since most function calls on RISC machines
do not use the stack, this is uncommon, but must work correctly.
Therefore, we save any area of the stack that was already written
and that we are using. Here we set up to do this by making a new
stack usage map from the old one. The actual save will be done
by store_one_arg.
Another approach might be to try to reorder the argument
evaluations to avoid this conflicting stack usage. */
#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
/* Since we will be writing into the entire argument area, the
map must be allocated for its entire size, not just the part that
is the responsibility of the caller. */
needed += reg_parm_stack_space;
/* Since we will be writing into the entire argument area, the
map must be allocated for its entire size, not just the part that
is the responsibility of the caller. */
needed += reg_parm_stack_space;
#endif
#ifdef ARGS_GROW_DOWNWARD
highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
needed + 1);
highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
needed + 1);
#else
highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
needed);
#endif
stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
if (initial_highest_arg_in_use)
bcopy (initial_stack_usage_map, stack_usage_map,
initial_highest_arg_in_use);
if (initial_highest_arg_in_use)
bcopy (initial_stack_usage_map, stack_usage_map,
initial_highest_arg_in_use);
if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
bzero (&stack_usage_map[initial_highest_arg_in_use],
highest_outgoing_arg_in_use - initial_highest_arg_in_use);
needed = 0;
if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
bzero (&stack_usage_map[initial_highest_arg_in_use],
highest_outgoing_arg_in_use - initial_highest_arg_in_use);
needed = 0;
/* The address of the outgoing argument list must not be copied to a
register here, because argblock would be left pointing to the
wrong place after the call to allocate_dynamic_stack_space below. */
/* The address of the outgoing argument list must not be copied to a
register here, because argblock would be left pointing to the
wrong place after the call to allocate_dynamic_stack_space below.
*/
argblock = virtual_outgoing_args_rtx;
argblock = virtual_outgoing_args_rtx;
#else /* not ACCUMULATE_OUTGOING_ARGS */
if (inhibit_defer_pop == 0)
{
/* Try to reuse some or all of the pending_stack_adjust
to get this space. Maybe we can avoid any pushing. */
if (needed > pending_stack_adjust)
if (inhibit_defer_pop == 0)
{
needed -= pending_stack_adjust;
pending_stack_adjust = 0;
/* Try to reuse some or all of the pending_stack_adjust
to get this space. Maybe we can avoid any pushing. */
if (needed > pending_stack_adjust)
{
needed -= pending_stack_adjust;
pending_stack_adjust = 0;
}
else
{
pending_stack_adjust -= needed;
needed = 0;
}
}
/* Special case this because overhead of `push_block' in this
case is non-trivial. */
if (needed == 0)
argblock = virtual_outgoing_args_rtx;
else
{
pending_stack_adjust -= needed;
needed = 0;
}
}
/* Special case this because overhead of `push_block' in this
case is non-trivial. */
if (needed == 0)
argblock = virtual_outgoing_args_rtx;
else
argblock = push_block (GEN_INT (needed), 0, 0);
argblock = push_block (GEN_INT (needed), 0, 0);
/* We only really need to call `copy_to_reg' in the case where push
insns are going to be used to pass ARGBLOCK to a function
call in ARGS. In that case, the stack pointer changes value
from the allocation point to the call point, and hence
the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
But might as well always do it. */
argblock = copy_to_reg (argblock);
/* We only really need to call `copy_to_reg' in the case where push
insns are going to be used to pass ARGBLOCK to a function
call in ARGS. In that case, the stack pointer changes value
from the allocation point to the call point, and hence
the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
But might as well always do it. */
argblock = copy_to_reg (argblock);
#endif /* not ACCUMULATE_OUTGOING_ARGS */
}
}
#ifdef ACCUMULATE_OUTGOING_ARGS
/* The save/restore code in store_one_arg handles all cases except one:
a constructor call (including a C function returning a BLKmode struct)
@ -2193,7 +2208,7 @@ emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
be viewed as just an efficiency improvement. */
rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
emit_move_insn (slot, val);
val = XEXP (slot, 0);
val = force_operand (XEXP (slot, 0), NULL_RTX);
mode = Pmode;
}
#endif
@ -2266,9 +2281,10 @@ emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
#endif
#endif
#ifdef ACCUMULATE_OUTGOING_ARGS
if (args_size.constant > current_function_outgoing_args_size)
current_function_outgoing_args_size = args_size.constant;
#ifdef ACCUMULATE_OUTGOING_ARGS
args_size.constant = 0;
#endif
@ -2613,9 +2629,10 @@ emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
#endif
#endif
#ifdef ACCUMULATE_OUTGOING_ARGS
if (args_size.constant > current_function_outgoing_args_size)
current_function_outgoing_args_size = args_size.constant;
#ifdef ACCUMULATE_OUTGOING_ARGS
args_size.constant = 0;
#endif

View File

@ -170,11 +170,6 @@ static int last_call_cuid;
static rtx subst_insn;
/* If nonzero, this is the insn that should be presumed to be
immediately in front of `subst_insn'. */
static rtx subst_prev_insn;
/* This is the lowest CUID that `subst' is currently dealing with.
get_last_value will not return a value if the register was set at or
after this CUID. If not for this mechanism, we could get confused if
@ -880,7 +875,11 @@ can_combine_p (insn, i3, pred, succ, pdest, psrc)
/* Can't merge a function call. */
|| GET_CODE (src) == CALL
/* Don't eliminate a function call argument. */
|| (GET_CODE (i3) == CALL_INSN && find_reg_fusage (i3, USE, dest))
|| (GET_CODE (i3) == CALL_INSN
&& (find_reg_fusage (i3, USE, dest)
|| (GET_CODE (dest) == REG
&& REGNO (dest) < FIRST_PSEUDO_REGISTER
&& global_regs[REGNO (dest)])))
/* Don't substitute into an incremented register. */
|| FIND_REG_INC_NOTE (i3, dest)
|| (succ && FIND_REG_INC_NOTE (succ, dest))
@ -1203,6 +1202,8 @@ try_combine (i3, i2, i1)
rtx new_i3_notes, new_i2_notes;
/* Notes that we substituted I3 into I2 instead of the normal case. */
int i3_subst_into_i2 = 0;
/* Notes that I1, I2 or I3 is a MULT operation. */
int have_mult = 0;
int maxreg;
rtx temp;
@ -1238,7 +1239,6 @@ try_combine (i3, i2, i1)
if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
temp = i1, i1 = i2, i2 = temp;
subst_prev_insn = 0;
added_links_insn = 0;
/* First check for one important special-case that the code below will
@ -1354,9 +1354,8 @@ try_combine (i3, i2, i1)
never appear in the insn stream so giving it the same INSN_UID
as I2 will not cause a problem. */
subst_prev_insn = i1
= gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
@ -1392,6 +1391,15 @@ try_combine (i3, i2, i1)
return 0;
}
/* See if any of the insns is a MULT operation. Unless one is, we will
reject a combination that is, since it must be slower. Be conservative
here. */
if (GET_CODE (i2src) == MULT
|| (i1 != 0 && GET_CODE (i1src) == MULT)
|| (GET_CODE (PATTERN (i3)) == SET
&& GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
have_mult = 1;
/* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
We used to do this EXCEPT in one case: I3 has a post-inc in an
output operand. However, that exception can give rise to insns like
@ -1601,7 +1609,11 @@ try_combine (i3, i2, i1)
really no reason to). */
|| max_reg_num () != maxreg
/* Fail if we couldn't do something and have a CLOBBER. */
|| GET_CODE (newpat) == CLOBBER)
|| GET_CODE (newpat) == CLOBBER
/* Fail if this new pattern is a MULT and we didn't have one before
at the outer level. */
|| (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
&& ! have_mult))
{
undo_all ();
return 0;
@ -1804,13 +1816,14 @@ try_combine (i3, i2, i1)
&& ! reg_referenced_p (i2dest, newpat))
{
rtx newdest = i2dest;
enum rtx_code split_code = GET_CODE (*split);
enum machine_mode split_mode = GET_MODE (*split);
/* Get NEWDEST as a register in the proper mode. We have already
validated that we can do this. */
if (GET_MODE (i2dest) != GET_MODE (*split)
&& GET_MODE (*split) != VOIDmode)
if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
{
newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
@ -1819,25 +1832,27 @@ try_combine (i3, i2, i1)
/* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
an ASHIFT. This can occur if it was inside a PLUS and hence
appeared to be a memory address. This is a kludge. */
if (GET_CODE (*split) == MULT
if (split_code == MULT
&& GET_CODE (XEXP (*split, 1)) == CONST_INT
&& (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
XEXP (*split, 0), GEN_INT (i)));
#ifdef INSN_SCHEDULING
/* If *SPLIT is a paradoxical SUBREG, when we split it, it should
be written as a ZERO_EXTEND. */
if (GET_CODE (*split) == SUBREG
&& GET_CODE (SUBREG_REG (*split)) == MEM)
SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
XEXP (*split, 0)));
#endif
newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
SUBST (*split, newdest);
i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
if (i2_code_number >= 0)
/* If the split point was a MULT and we didn't have one before,
don't use one now. */
if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
}
}
@ -3261,10 +3276,7 @@ simplify_rtx (x, op0_mode, last, in_dest)
&& (temp = simplify_unary_operation (NOT, mode,
XEXP (XEXP (x, 0), 1),
mode)) != 0)
{
SUBST (XEXP (XEXP (x, 0), 1), temp);
return XEXP (x, 0);
}
return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
/* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
other than 1, but that is not valid. We could do a similar
@ -3436,6 +3448,13 @@ simplify_rtx (x, op0_mode, last, in_dest)
&& GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
XEXP (XEXP (XEXP (x, 0), 0), 0));
/* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
is (float_truncate:SF x). */
if (GET_CODE (XEXP (x, 0)) == SUBREG
&& subreg_lowpart_p (XEXP (x, 0))
&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
return SUBREG_REG (XEXP (x, 0));
break;
#ifdef HAVE_cc0
@ -4354,6 +4373,7 @@ simplify_set (x)
&& GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
&& (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
&& XEXP (XEXP (src, 0), 1) == const0_rtx
&& GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
&& (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
GET_MODE (XEXP (XEXP (src, 0), 0)))
== GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
@ -6874,8 +6894,10 @@ nonzero_bits (x, mode)
int width1 = floor_log2 (nz1) + 1;
int low0 = floor_log2 (nz0 & -nz0);
int low1 = floor_log2 (nz1 & -nz1);
int op0_maybe_minusp = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
int op1_maybe_minusp = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
HOST_WIDE_INT op0_maybe_minusp
= (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
HOST_WIDE_INT op1_maybe_minusp
= (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
int result_width = mode_width;
int result_low = 0;
@ -8618,6 +8640,7 @@ simplify_comparison (code, pop0, pop1)
{
op0 = gen_lowpart_for_combine (tmode, inner_op0);
op1 = gen_lowpart_for_combine (tmode, inner_op1);
code = unsigned_condition (code);
changed = 1;
break;
}
@ -8819,6 +8842,7 @@ simplify_comparison (code, pop0, pop1)
{
const_op = 0, op1 = const0_rtx;
code = LT;
break;
}
else
break;
@ -9811,15 +9835,10 @@ get_last_value (x)
{
rtx insn, set;
/* If there is an insn that is supposed to be immediately
in front of subst_insn, use it. */
if (subst_prev_insn != 0)
insn = subst_prev_insn;
else
for (insn = prev_nonnote_insn (subst_insn);
insn && INSN_CUID (insn) >= subst_low_cuid;
insn = prev_nonnote_insn (insn))
;
for (insn = prev_nonnote_insn (subst_insn);
insn && INSN_CUID (insn) >= subst_low_cuid;
insn = prev_nonnote_insn (insn))
;
if (insn
&& (set = single_set (insn)) != 0
@ -10332,10 +10351,7 @@ distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
switch (REG_NOTE_KIND (note))
{
case REG_UNUSED:
/* If this note is from any insn other than i3, then we have no
use for it, and must ignore it.
Any clobbers for i3 may still exist, and so we must process
/* Any clobbers for i3 may still exist, and so we must process
REG_UNUSED notes from that insn.
Any clobbers from i2 or i1 can only exist if they were added by
@ -10345,14 +10361,18 @@ distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
if it is for the same register as the original i3 dest.
In that case, we will notice that the register is set in i3,
and then add a REG_UNUSED note for the destination of i3, which
is wrong. */
if (from_insn != i3)
break;
is wrong. However, it is possible to have REG_UNUSED notes from
i2 or i1 for register which were both used and clobbered, so
we keep notes from i2 or i1 if they will turn into REG_DEAD
notes. */
/* If this register is set or clobbered in I3, put the note there
unless there is one already. */
else if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
{
if (from_insn != i3)
break;
if (! (GET_CODE (XEXP (note, 0)) == REG
? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
: find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
@ -10633,7 +10653,9 @@ distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
{
rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
if (reg_referenced_p (piece, PATTERN (place))
if ((reg_referenced_p (piece, PATTERN (place))
|| (GET_CODE (place) == CALL_INSN
&& find_reg_fusage (place, USE, piece)))
&& ! dead_or_set_p (place, piece)
&& ! reg_bitfield_target_p (piece,
PATTERN (place)))

View File

@ -618,7 +618,7 @@ static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
struct table_elt *));
static void invalidate PROTO((rtx));
static void invalidate PROTO((rtx, enum machine_mode));
static void remove_invalid_refs PROTO((int));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((struct write_data *));
@ -1466,7 +1466,10 @@ merge_equiv_classes (class1, class2)
remove_from_table (elt, hash);
if (insert_regs (exp, class1, 0))
hash = HASH (exp, mode);
{
rehash_using_reg (exp);
hash = HASH (exp, mode);
}
new = insert (exp, class1, hash, mode);
new->in_memory = hash_arg_in_memory;
new->in_struct = hash_arg_in_struct;
@ -1480,14 +1483,18 @@ merge_equiv_classes (class1, class2)
(because, when a memory reference with a varying address is stored in,
all memory references are removed by invalidate_memory
so specific invalidation is superfluous).
FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
instead of just the amount indicated by the mode of X. This is only used
for bitfield stores into memory.
A nonvarying address may be just a register or just
a symbol reference, or it may be either of those plus
a numeric offset. */
static void
invalidate (x)
invalidate (x, full_mode)
rtx x;
enum machine_mode full_mode;
{
register int i;
register struct table_elt *p;
@ -1562,7 +1569,7 @@ invalidate (x)
{
if (GET_CODE (SUBREG_REG (x)) != REG)
abort ();
invalidate (SUBREG_REG (x));
invalidate (SUBREG_REG (x), VOIDmode);
return;
}
@ -1573,7 +1580,10 @@ invalidate (x)
if (GET_CODE (x) != MEM)
abort ();
set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x)),
if (full_mode == VOIDmode)
full_mode = GET_MODE (x);
set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (full_mode),
&base, &start, &end);
for (i = 0; i < NBUCKETS; i++)
@ -1711,7 +1721,7 @@ invalidate_for_call ()
if (reg_tick[regno] >= 0)
reg_tick[regno]++;
in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, regno);
in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
}
/* In the case where we have no call-clobbered hard registers in the
@ -1948,17 +1958,6 @@ canon_hash (x, mode)
if (fmt[i] == 'e')
{
rtx tem = XEXP (x, i);
rtx tem1;
/* If the operand is a REG that is equivalent to a constant, hash
as if we were hashing the constant, since we will be comparing
that way. */
if (tem != 0 && GET_CODE (tem) == REG
&& REGNO_QTY_VALID_P (REGNO (tem))
&& qty_mode[reg_qty[REGNO (tem)]] == GET_MODE (tem)
&& (tem1 = qty_const[reg_qty[REGNO (tem)]]) != 0
&& CONSTANT_P (tem1))
tem = tem1;
/* If we are about to do the last recursive call
needed at this level, change it into iteration.
@ -2230,9 +2229,10 @@ refers_to_p (x, y)
set PBASE, PSTART, and PEND which correspond to the base of the address,
the starting offset, and ending offset respectively.
ADDR is known to be a nonvarying address.
ADDR is known to be a nonvarying address. */
cse_address_varies_p returns zero for nonvarying addresses. */
/* ??? Despite what the comments say, this function is in fact frequently
passed varying addresses. This does not appear to cause any problems. */
static void
set_nonvarying_address_components (addr, size, pbase, pstart, pend)
@ -2323,6 +2323,12 @@ set_nonvarying_address_components (addr, size, pbase, pstart, pend)
break;
}
if (GET_CODE (base) == CONST_INT)
{
start += INTVAL (base);
base = const0_rtx;
}
end = start + size;
/* Set the return values. */
@ -2353,13 +2359,6 @@ refers_to_mem_p (x, base, start, end)
register enum rtx_code code;
register char *fmt;
if (GET_CODE (base) == CONST_INT)
{
start += INTVAL (base);
end += INTVAL (base);
base = const0_rtx;
}
repeat:
if (x == 0)
return 0;
@ -3086,7 +3085,7 @@ simplify_unary_operation (code, mode, op, op_mode)
/* We can do some operations on integer CONST_DOUBLEs. Also allow
for a DImode operation on a CONST_INT. */
else if (GET_MODE (op) == VOIDmode && width == HOST_BITS_PER_INT * 2
else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
&& (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
{
HOST_WIDE_INT l1, h1, lv, hv;
@ -3123,10 +3122,8 @@ simplify_unary_operation (code, mode, op, op_mode)
break;
case TRUNCATE:
if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
return GEN_INT (l1 & GET_MODE_MASK (mode));
else
return 0;
/* This is just a change-of-mode, so do nothing. */
lv = l1, hv = h1;
break;
case ZERO_EXTEND:
@ -3220,7 +3217,10 @@ simplify_unary_operation (code, mode, op, op_mode)
set_float_handler (NULL_PTR);
return x;
}
else if (GET_CODE (op) == CONST_DOUBLE && GET_MODE_CLASS (mode) == MODE_INT
else if (GET_CODE (op) == CONST_DOUBLE
&& GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
&& GET_MODE_CLASS (mode) == MODE_INT
&& width <= HOST_BITS_PER_WIDE_INT && width > 0)
{
REAL_VALUE_TYPE d;
@ -5001,7 +5001,11 @@ fold_rtx (x, insn)
if (GET_MODE (table) != Pmode)
new = gen_rtx (TRUNCATE, GET_MODE (table), new);
return new;
/* Indicate this is a constant. This isn't a
valid form of CONST, but it will only be used
to fold the next insns and then discarded, so
it should be safe. */
return gen_rtx (CONST, GET_MODE (new), new);
}
}
}
@ -5174,13 +5178,26 @@ fold_rtx (x, insn)
switch (GET_RTX_CLASS (code))
{
case '1':
/* We can't simplify extension ops unless we know the original mode. */
if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
&& mode_arg0 == VOIDmode)
break;
new = simplify_unary_operation (code, mode,
const_arg0 ? const_arg0 : folded_arg0,
mode_arg0);
{
int is_const = 0;
/* We can't simplify extension ops unless we know the
original mode. */
if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
&& mode_arg0 == VOIDmode)
break;
/* If we had a CONST, strip it off and put it back later if we
fold. */
if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
is_const = 1, const_arg0 = XEXP (const_arg0, 0);
new = simplify_unary_operation (code, mode,
const_arg0 ? const_arg0 : folded_arg0,
mode_arg0);
if (new != 0 && is_const)
new = gen_rtx (CONST, mode, new);
}
break;
case '<':
@ -5353,11 +5370,41 @@ fold_rtx (x, insn)
ADDR_DIFF_VEC table. */
if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
{
rtx y = lookup_as_function (folded_arg0, MINUS);
rtx y
= GET_CODE (folded_arg0) == MINUS ? folded_arg0
: lookup_as_function (folded_arg0, MINUS);
if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
&& XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
return XEXP (y, 0);
/* Now try for a CONST of a MINUS like the above. */
if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
: lookup_as_function (folded_arg0, CONST))) != 0
&& GET_CODE (XEXP (y, 0)) == MINUS
&& GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
&& XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
return XEXP (XEXP (y, 0), 0);
}
/* Likewise if the operands are in the other order. */
if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
{
rtx y
= GET_CODE (folded_arg1) == MINUS ? folded_arg1
: lookup_as_function (folded_arg1, MINUS);
if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
&& XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
return XEXP (y, 0);
/* Now try for a CONST of a MINUS like the above. */
if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
: lookup_as_function (folded_arg1, CONST))) != 0
&& GET_CODE (XEXP (y, 0)) == MINUS
&& GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
&& XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
return XEXP (XEXP (y, 0), 0);
}
/* If second operand is a register equivalent to a negative
@ -5759,6 +5806,13 @@ record_jump_cond (code, mode, op0, op1, reversed_nonequality)
op0_elt = lookup (op0, op0_hash, mode);
op1_elt = lookup (op1, op1_hash, mode);
/* If both operands are already equivalent or if they are not in the
table but are identical, do nothing. */
if ((op0_elt != 0 && op1_elt != 0
&& op0_elt->first_same_value == op1_elt->first_same_value)
|| op0 == op1 || rtx_equal_p (op0, op1))
return;
/* If we aren't setting two things equal all we can do is save this
comparison. Similarly if this is floating-point. In the latter
case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
@ -5945,7 +5999,7 @@ cse_insn (insn, in_libcall_block)
{
for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
invalidate (SET_DEST (XEXP (tem, 0)));
invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
}
if (GET_CODE (x) == SET)
@ -5976,7 +6030,7 @@ cse_insn (insn, in_libcall_block)
canon_reg (SET_SRC (x), insn);
apply_change_group ();
fold_rtx (SET_SRC (x), insn);
invalidate (SET_DEST (x));
invalidate (SET_DEST (x), VOIDmode);
}
else
n_sets = 1;
@ -6007,10 +6061,10 @@ cse_insn (insn, in_libcall_block)
if (GET_CODE (clobbered) == REG
|| GET_CODE (clobbered) == SUBREG)
invalidate (clobbered);
invalidate (clobbered, VOIDmode);
else if (GET_CODE (clobbered) == STRICT_LOW_PART
|| GET_CODE (clobbered) == ZERO_EXTRACT)
invalidate (XEXP (clobbered, 0));
invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
}
}
@ -6026,7 +6080,7 @@ cse_insn (insn, in_libcall_block)
canon_reg (SET_SRC (y), insn);
apply_change_group ();
fold_rtx (SET_SRC (y), insn);
invalidate (SET_DEST (y));
invalidate (SET_DEST (y), VOIDmode);
}
else if (SET_DEST (y) == pc_rtx
&& GET_CODE (SET_SRC (y)) == LABEL_REF)
@ -6663,7 +6717,11 @@ cse_insn (insn, in_libcall_block)
else if (constant_pool_entries_cost
&& CONSTANT_P (trial)
&& (src_folded == 0 || GET_CODE (src_folded) != MEM)
&& ! (GET_CODE (trial) == CONST
&& GET_CODE (XEXP (trial, 0)) == TRUNCATE)
&& (src_folded == 0
|| (GET_CODE (src_folded) != MEM
&& ! src_folded_force_flag))
&& GET_MODE_CLASS (mode) != MODE_CC)
{
src_folded_force_flag = 1;
@ -6916,10 +6974,10 @@ cse_insn (insn, in_libcall_block)
{
if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
|| GET_CODE (dest) == MEM)
invalidate (dest);
invalidate (dest, VOIDmode);
else if (GET_CODE (dest) == STRICT_LOW_PART
|| GET_CODE (dest) == ZERO_EXTRACT)
invalidate (XEXP (dest, 0));
invalidate (XEXP (dest, 0), GET_MODE (dest));
sets[i].rtl = 0;
}
@ -6965,7 +7023,10 @@ cse_insn (insn, in_libcall_block)
classp = 0;
}
if (insert_regs (src_eqv, classp, 0))
src_eqv_hash = HASH (src_eqv, eqvmode);
{
rehash_using_reg (src_eqv);
src_eqv_hash = HASH (src_eqv, eqvmode);
}
elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
elt->in_memory = src_eqv_in_memory;
elt->in_struct = src_eqv_in_struct;
@ -7012,7 +7073,10 @@ cse_insn (insn, in_libcall_block)
any of the src_elt's, because they would have failed to
match if not still valid. */
if (insert_regs (src, classp, 0))
sets[i].src_hash = HASH (src, mode);
{
rehash_using_reg (src);
sets[i].src_hash = HASH (src, mode);
}
elt = insert (src, classp, sets[i].src_hash, mode);
elt->in_memory = sets[i].src_in_memory;
elt->in_struct = sets[i].src_in_struct;
@ -7054,18 +7118,21 @@ cse_insn (insn, in_libcall_block)
for (i = 0; i < n_sets; i++)
if (sets[i].rtl)
{
register rtx dest = sets[i].inner_dest;
/* We can't use the inner dest, because the mode associated with
a ZERO_EXTRACT is significant. */
register rtx dest = SET_DEST (sets[i].rtl);
/* Needed for registers to remove the register from its
previous quantity's chain.
Needed for memory if this is a nonvarying address, unless
we have just done an invalidate_memory that covers even those. */
if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
|| (! writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
invalidate (dest);
|| (GET_CODE (dest) == MEM && ! writes_memory.all
&& ! cse_rtx_addr_varies_p (dest)))
invalidate (dest, VOIDmode);
else if (GET_CODE (dest) == STRICT_LOW_PART
|| GET_CODE (dest) == ZERO_EXTRACT)
invalidate (XEXP (dest, 0));
invalidate (XEXP (dest, 0), GET_MODE (dest));
}
/* Make sure registers mentioned in destinations
@ -7122,7 +7189,17 @@ cse_insn (insn, in_libcall_block)
|| in_libcall_block
/* If we didn't put a REG_EQUAL value or a source into the hash
table, there is no point is recording DEST. */
|| sets[i].src_elt == 0)
|| sets[i].src_elt == 0
/* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
or SIGN_EXTEND, don't record DEST since it can cause
some tracking to be wrong.
??? Think about this more later. */
|| (GET_CODE (dest) == SUBREG
&& (GET_MODE_SIZE (GET_MODE (dest))
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
&& (GET_CODE (sets[i].src) == SIGN_EXTEND
|| GET_CODE (sets[i].src) == ZERO_EXTEND)))
continue;
/* STRICT_LOW_PART isn't part of the value BEING set,
@ -7134,9 +7211,12 @@ cse_insn (insn, in_libcall_block)
if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
/* Registers must also be inserted into chains for quantities. */
if (insert_regs (dest, sets[i].src_elt, 1))
/* If `insert_regs' changes something, the hash code must be
recalculated. */
sets[i].dest_hash = HASH (dest, GET_MODE (dest));
{
/* If `insert_regs' changes something, the hash code must be
recalculated. */
rehash_using_reg (dest);
sets[i].dest_hash = HASH (dest, GET_MODE (dest));
}
elt = insert (dest, sets[i].src_elt,
sets[i].dest_hash, GET_MODE (dest));
@ -7200,7 +7280,10 @@ cse_insn (insn, in_libcall_block)
if (src_elt == 0)
{
if (insert_regs (new_src, classp, 0))
src_hash = HASH (new_src, new_mode);
{
rehash_using_reg (new_src);
src_hash = HASH (new_src, new_mode);
}
src_elt = insert (new_src, classp, src_hash, new_mode);
src_elt->in_memory = elt->in_memory;
src_elt->in_struct = elt->in_struct;
@ -7377,7 +7460,7 @@ invalidate_from_clobbers (w, x)
/* This should be *very* rare. */
if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
invalidate (stack_pointer_rtx);
invalidate (stack_pointer_rtx, VOIDmode);
}
if (GET_CODE (x) == CLOBBER)
@ -7387,10 +7470,10 @@ invalidate_from_clobbers (w, x)
{
if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
|| (GET_CODE (ref) == MEM && ! w->all))
invalidate (ref);
invalidate (ref, VOIDmode);
else if (GET_CODE (ref) == STRICT_LOW_PART
|| GET_CODE (ref) == ZERO_EXTRACT)
invalidate (XEXP (ref, 0));
invalidate (XEXP (ref, 0), GET_MODE (ref));
}
}
else if (GET_CODE (x) == PARALLEL)
@ -7406,10 +7489,10 @@ invalidate_from_clobbers (w, x)
{
if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
|| (GET_CODE (ref) == MEM && !w->all))
invalidate (ref);
invalidate (ref, VOIDmode);
else if (GET_CODE (ref) == STRICT_LOW_PART
|| GET_CODE (ref) == ZERO_EXTRACT)
invalidate (XEXP (ref, 0));
invalidate (XEXP (ref, 0), GET_MODE (ref));
}
}
}
@ -7537,11 +7620,12 @@ cse_around_loop (loop_start)
for (p = last_jump_equiv_class->first_same_value; p;
p = p->next_same_value)
if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
|| GET_CODE (p->exp) == SUBREG)
invalidate (p->exp);
|| (GET_CODE (p->exp) == SUBREG
&& GET_CODE (SUBREG_REG (p->exp)) == REG))
invalidate (p->exp, VOIDmode);
else if (GET_CODE (p->exp) == STRICT_LOW_PART
|| GET_CODE (p->exp) == ZERO_EXTRACT)
invalidate (XEXP (p->exp, 0));
invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
/* Process insns starting after LOOP_START until we hit a CALL_INSN or
a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
@ -7600,10 +7684,10 @@ invalidate_skipped_set (dest, set)
if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
|| (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
invalidate (dest);
invalidate (dest, VOIDmode);
else if (GET_CODE (dest) == STRICT_LOW_PART
|| GET_CODE (dest) == ZERO_EXTRACT)
invalidate (XEXP (dest, 0));
invalidate (XEXP (dest, 0), GET_MODE (dest));
}
/* Invalidate all insns from START up to the end of the function or the
@ -7755,10 +7839,10 @@ cse_set_around_loop (x, insn, loop_start)
if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
|| (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
&& ! cse_rtx_addr_varies_p (SET_DEST (x))))
invalidate (SET_DEST (x));
invalidate (SET_DEST (x), VOIDmode);
else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
|| GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
invalidate (XEXP (SET_DEST (x), 0));
invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
}
/* Find the end of INSN's basic block and return its range,

View File

@ -1843,7 +1843,13 @@ dbxout_symbol_location (decl, type, suffix, home)
letter = decl_function_context (decl) ? 'V' : 'S';
if (!DECL_INITIAL (decl))
/* This should be the same condition as in assemble_variable, but
we don't have access to dont_output_data here. So, instead,
we rely on the fact that error_mark_node initializers always
end up in bss for C++ and never end up in bss for C. */
if (DECL_INITIAL (decl) == 0
|| (!strcmp (lang_identify (), "cplusplus")
&& DECL_INITIAL (decl) == error_mark_node))
current_sym_code = N_LCSYM;
else if (DECL_IN_TEXT_SECTION (decl))
/* This is not quite right, but it's the closest

View File

@ -4435,118 +4435,91 @@ output_decl (decl, containing_scope)
output_formal_types (TREE_TYPE (decl));
else
{
/* Generate DIEs to represent all known formal parameters */
register tree arg_decls = DECL_ARGUMENTS (decl);
register tree parm;
{
register tree last_arg;
/* WARNING! Kludge zone ahead! Here we have a special
hack for svr4 SDB compatibility. Instead of passing the
current FUNCTION_DECL node as the second parameter (i.e.
the `containing_scope' parameter) to `output_decl' (as
we ought to) we instead pass a pointer to our own private
fake_containing_scope node. That node is a RECORD_TYPE
node which NO OTHER TYPE may ever actually be a member of.
last_arg = (arg_decls && TREE_CODE (arg_decls) != ERROR_MARK)
? tree_last (arg_decls)
: NULL;
This pointer will ultimately get passed into `output_type'
as its `containing_scope' parameter. `Output_type' will
then perform its part in the hack... i.e. it will pend
the type of the formal parameter onto the pending_types
list. Later on, when we are done generating the whole
sequence of formal parameter DIEs for this function
definition, we will un-pend all previously pended types
of formal parameters for this function definition.
/* Generate DIEs to represent all known formal parameters, but
don't do it if this looks like a varargs function. A given
function is considered to be a varargs function if (and only
if) its last named argument is named `__builtin_va_alist'. */
This whole kludge prevents any type DIEs from being
mixed in with the formal parameter DIEs. That's good
because svr4 SDB believes that the list of formal
parameter DIEs for a function ends wherever the first
non-formal-parameter DIE appears. Thus, we have to
keep the formal parameter DIEs segregated. They must
all appear (consecutively) at the start of the list of
children for the DIE representing the function definition.
Then (and only then) may we output any additional DIEs
needed to represent the types of these formal parameters.
*/
if (! last_arg
|| ! DECL_NAME (last_arg)
|| strcmp (IDENTIFIER_POINTER (DECL_NAME (last_arg)),
"__builtin_va_alist"))
{
register tree parm;
/*
When generating DIEs, generate the unspecified_parameters
DIE instead if we come across the arg "__builtin_va_alist"
*/
/* WARNING! Kludge zone ahead! Here we have a special
hack for svr4 SDB compatibility. Instead of passing the
current FUNCTION_DECL node as the second parameter (i.e.
the `containing_scope' parameter) to `output_decl' (as
we ought to) we instead pass a pointer to our own private
fake_containing_scope node. That node is a RECORD_TYPE
node which NO OTHER TYPE may ever actually be a member of.
This pointer will ultimately get passed into `output_type'
as its `containing_scope' parameter. `Output_type' will
then perform its part in the hack... i.e. it will pend
the type of the formal parameter onto the pending_types
list. Later on, when we are done generating the whole
sequence of formal parameter DIEs for this function
definition, we will un-pend all previously pended types
of formal parameters for this function definition.
This whole kludge prevents any type DIEs from being
mixed in with the formal parameter DIEs. That's good
because svr4 SDB believes that the list of formal
parameter DIEs for a function ends wherever the first
non-formal-parameter DIE appears. Thus, we have to
keep the formal parameter DIEs segregated. They must
all appear (consecutively) at the start of the list of
children for the DIE representing the function definition.
Then (and only then) may we output any additional DIEs
needed to represent the types of these formal parameters.
*/
for (parm = arg_decls; parm; parm = TREE_CHAIN (parm))
if (TREE_CODE (parm) == PARM_DECL)
output_decl (parm, fake_containing_scope);
/* Now that we have finished generating all of the DIEs to
represent the formal parameters themselves, force out
any DIEs needed to represent their types. We do this
simply by un-pending all previously pended types which
can legitimately go into the chain of children DIEs for
the current FUNCTION_DECL. */
output_pending_types_for_scope (decl);
for (parm = arg_decls; parm; parm = TREE_CHAIN (parm))
if (TREE_CODE (parm) == PARM_DECL)
{
if (DECL_NAME(parm) &&
!strcmp(IDENTIFIER_POINTER(DECL_NAME(parm)),
"__builtin_va_alist") )
output_die (output_unspecified_parameters_die, decl);
else
output_decl (parm, fake_containing_scope);
}
}
/* Now try to decide if we should put an ellipsis at the end. */
/*
Now that we have finished generating all of the DIEs to
represent the formal parameters themselves, force out
any DIEs needed to represent their types. We do this
simply by un-pending all previously pended types which
can legitimately go into the chain of children DIEs for
the current FUNCTION_DECL.
*/
output_pending_types_for_scope (decl);
/*
Decide whether we need a unspecified_parameters DIE at the end.
There are 2 more cases to do this for:
1) the ansi ... declaration - this is detectable when the end
of the arg list is not a void_type_node
2) an unprototyped function declaration (not a definition). This
just means that we have no info about the parameters at all.
*/
{
register int has_ellipsis = TRUE; /* default assumption */
register tree fn_arg_types = TYPE_ARG_TYPES (TREE_TYPE (decl));
if (fn_arg_types)
{
/* This function declaration/definition was prototyped. */
/* If the list of formal argument types ends with a
void_type_node, then the formals list did *not* end
with an ellipsis. */
if (TREE_VALUE (tree_last (fn_arg_types)) == void_type_node)
has_ellipsis = FALSE;
}
else
{
/* This function declaration/definition was not prototyped. */
/* Note that all non-prototyped function *declarations* are
assumed to represent varargs functions (until proven
otherwise). */
if (DECL_INITIAL (decl)) /* if this is a func definition */
{
if (!arg_decls)
has_ellipsis = FALSE; /* no args == (void) */
else
{
/* For a non-prototyped function definition which
declares one or more formal parameters, if the name
of the first formal parameter is *not*
__builtin_va_alist then we must assume that this
is *not* a varargs function. */
if (DECL_NAME (arg_decls)
&& strcmp (IDENTIFIER_POINTER (DECL_NAME (arg_decls)),
"__builtin_va_alist"))
has_ellipsis = FALSE;
}
}
}
if (has_ellipsis)
output_die (output_unspecified_parameters_die, decl);
/* this is the prototyped case, check for ... */
if (TREE_VALUE (tree_last (fn_arg_types)) != void_type_node)
output_die (output_unspecified_parameters_die, decl);
}
else
{
/* this is unprotoyped, check for undefined (just declaration) */
if (!DECL_INITIAL (decl))
output_die (output_unspecified_parameters_die, decl);
}
}
}

View File

@ -1176,7 +1176,7 @@ operand_subword (op, i, validate_address, mode)
if (GET_MODE_CLASS (mode) != MODE_INT
|| (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
|| BITS_PER_WORD > HOST_BITS_PER_INT)
|| BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
return 0;
if (WORDS_BIG_ENDIAN)
@ -2130,9 +2130,10 @@ add_insn (insn)
last_insn = insn;
}
/* Add INSN into the doubly-linked list after insn AFTER. This should be the
only function called to insert an insn once delay slots have been filled
since only it knows how to update a SEQUENCE. */
/* Add INSN into the doubly-linked list after insn AFTER. This and
the next should be the only functions called to insert an insn once
delay slots have been filled since only they know how to update a
SEQUENCE. */
void
add_insn_after (insn, after)
@ -2140,6 +2141,9 @@ add_insn_after (insn, after)
{
rtx next = NEXT_INSN (after);
if (optimize && INSN_DELETED_P (after))
abort ();
NEXT_INSN (insn) = next;
PREV_INSN (insn) = after;
@ -2157,7 +2161,13 @@ add_insn_after (insn, after)
/* Scan all pending sequences too. */
for (; stack; stack = stack->next)
if (after == stack->last)
stack->last = insn;
{
stack->last = insn;
break;
}
if (stack == 0)
abort ();
}
NEXT_INSN (after) = insn;
@ -2168,6 +2178,54 @@ add_insn_after (insn, after)
}
}
/* Add INSN into the doubly-linked list before insn BEFORE. This and
the previous should be the only functions called to insert an insn once
delay slots have been filled since only they know how to update a
SEQUENCE. */
void
add_insn_before (insn, before)
rtx insn, before;
{
rtx prev = PREV_INSN (before);
if (optimize && INSN_DELETED_P (before))
abort ();
PREV_INSN (insn) = prev;
NEXT_INSN (insn) = before;
if (prev)
{
NEXT_INSN (prev) = insn;
if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
{
rtx sequence = PATTERN (prev);
NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
}
}
else if (first_insn == before)
first_insn = insn;
else
{
struct sequence_stack *stack = sequence_stack;
/* Scan all pending sequences too. */
for (; stack; stack = stack->next)
if (before == stack->first)
{
stack->first = insn;
break;
}
if (stack == 0)
abort ();
}
PREV_INSN (before) = insn;
if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
/* Delete all insns made since FROM.
FROM becomes the new last instruction. */
@ -2279,7 +2337,7 @@ emit_insn_before (pattern, before)
for (i = 0; i < XVECLEN (pattern, 0); i++)
{
insn = XVECEXP (pattern, 0, i);
add_insn_after (insn, PREV_INSN (before));
add_insn_before (insn, before);
}
if (XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
sequence_result[XVECLEN (pattern, 0)] = pattern;
@ -2287,7 +2345,7 @@ emit_insn_before (pattern, before)
else
{
insn = make_insn_raw (pattern);
add_insn_after (insn, PREV_INSN (before));
add_insn_before (insn, before);
}
return insn;
@ -2307,7 +2365,7 @@ emit_jump_insn_before (pattern, before)
else
{
insn = make_jump_insn_raw (pattern);
add_insn_after (insn, PREV_INSN (before));
add_insn_before (insn, before);
}
return insn;
@ -2327,7 +2385,7 @@ emit_call_insn_before (pattern, before)
else
{
insn = make_call_insn_raw (pattern);
add_insn_after (insn, PREV_INSN (before));
add_insn_before (insn, before);
PUT_CODE (insn, CALL_INSN);
}
@ -2345,7 +2403,7 @@ emit_barrier_before (before)
INSN_UID (insn) = cur_insn_uid++;
add_insn_after (insn, PREV_INSN (before));
add_insn_before (insn, before);
return insn;
}
@ -2361,7 +2419,7 @@ emit_note_before (subtype, before)
NOTE_SOURCE_FILE (note) = 0;
NOTE_LINE_NUMBER (note) = subtype;
add_insn_after (note, PREV_INSN (before));
add_insn_before (note, before);
return note;
}
@ -2577,7 +2635,7 @@ emit_insns_before (insn, before)
while (insn)
{
rtx next = NEXT_INSN (insn);
add_insn_after (insn, PREV_INSN (before));
add_insn_before (insn, before);
last = insn;
insn = next;
}

View File

@ -299,7 +299,10 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
/* Here we transfer the words of the field
in the order least significant first.
This is because the most significant word is the one which may
be less than full. */
be less than full.
However, only do that if the value is not BLKmode. */
int backwards = WORDS_BIG_ENDIAN && fieldmode != BLKmode;
int nwords = (bitsize + (BITS_PER_WORD - 1)) / BITS_PER_WORD;
int i;
@ -315,8 +318,8 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
{
/* If I is 0, use the low-order word in both field and target;
if I is 1, use the next to lowest word; and so on. */
int wordnum = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
int bit_offset = (WORDS_BIG_ENDIAN
int wordnum = (backwards ? nwords - i - 1 : i);
int bit_offset = (backwards
? MAX (bitsize - (i + 1) * BITS_PER_WORD, 0)
: i * BITS_PER_WORD);
store_bit_field (op0, MIN (BITS_PER_WORD,
@ -369,7 +372,10 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
&& !(bitsize == 1 && GET_CODE (value) == CONST_INT)
/* Ensure insv's size is wide enough for this field. */
&& (GET_MODE_BITSIZE (insn_operand_mode[(int) CODE_FOR_insv][3])
>= bitsize))
>= bitsize)
&& ! ((GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
&& (bitsize + bitpos
> GET_MODE_BITSIZE (insn_operand_mode[(int) CODE_FOR_insv][3]))))
{
int xbitpos = bitpos;
rtx value1;
@ -741,19 +747,13 @@ store_split_bit_field (op0, bitsize, bitpos, value, align)
>> (bitsize - bitsdone - thissize))
& (((HOST_WIDE_INT) 1 << thissize) - 1));
else
{
/* The args are chosen so that the last part
includes the lsb. */
int bit_offset = 0;
/* If the value isn't in memory, then it must be right aligned
if a register, so skip past the padding on the left. If it
is in memory, then there is no padding on the left. */
if (GET_CODE (value) != MEM)
bit_offset = BITS_PER_WORD - bitsize;
part = extract_fixed_bit_field (word_mode, value, 0, thissize,
bit_offset + bitsdone,
NULL_RTX, 1, align);
}
/* The args are chosen so that the last part includes the lsb.
Give extract_bit_field the value it needs (with endianness
compensation) to fetch the piece we want. */
part = extract_fixed_bit_field (word_mode, value, 0, thissize,
GET_MODE_BITSIZE (GET_MODE (value))
- bitsize + bitsdone,
NULL_RTX, 1, align);
#else
/* Fetch successively more significant portions. */
if (GET_CODE (value) == CONST_INT)
@ -972,7 +972,10 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
#ifdef HAVE_extzv
if (HAVE_extzv
&& (GET_MODE_BITSIZE (insn_operand_mode[(int) CODE_FOR_extzv][0])
>= bitsize))
>= bitsize)
&& ! ((GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
&& (bitsize + bitpos
> GET_MODE_BITSIZE (insn_operand_mode[(int) CODE_FOR_extzv][0]))))
{
int xbitpos = bitpos, xoffset = offset;
rtx bitsize_rtx, bitpos_rtx;
@ -1111,7 +1114,10 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
#ifdef HAVE_extv
if (HAVE_extv
&& (GET_MODE_BITSIZE (insn_operand_mode[(int) CODE_FOR_extv][0])
>= bitsize))
>= bitsize)
&& ! ((GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
&& (bitsize + bitpos
> GET_MODE_BITSIZE (insn_operand_mode[(int) CODE_FOR_extv][0]))))
{
int xbitpos = bitpos, xoffset = offset;
rtx bitsize_rtx, bitpos_rtx;
@ -2053,23 +2059,31 @@ expand_mult (mode, op0, op1, target, unsignedp)
{
rtx const_op1 = op1;
/* synth_mult does an `unsigned int' multiply. As long as the mode is
less than or equal in size to `unsigned int' this doesn't matter.
If the mode is larger than `unsigned int', then synth_mult works only
if the constant value exactly fits in an `unsigned int' without any
truncation. This means that multiplying by negative values does
not work; results are off by 2^32 on a 32 bit machine. */
/* If we are multiplying in DImode, it may still be a win
to try to work with shifts and adds. */
if (GET_CODE (op1) == CONST_DOUBLE
&& GET_MODE_CLASS (GET_MODE (op1)) == MODE_INT
&& HOST_BITS_PER_INT <= BITS_PER_WORD)
{
if ((CONST_DOUBLE_HIGH (op1) == 0 && CONST_DOUBLE_LOW (op1) >= 0)
|| (CONST_DOUBLE_HIGH (op1) == -1 && CONST_DOUBLE_LOW (op1) < 0))
const_op1 = GEN_INT (CONST_DOUBLE_LOW (op1));
}
&& HOST_BITS_PER_INT >= BITS_PER_WORD
&& CONST_DOUBLE_HIGH (op1) == 0)
const_op1 = GEN_INT (CONST_DOUBLE_LOW (op1));
else if (HOST_BITS_PER_INT < GET_MODE_BITSIZE (mode)
&& GET_CODE (op1) == CONST_INT
&& INTVAL (op1) < 0)
const_op1 = 0;
/* We used to test optimize here, on the grounds that it's better to
produce a smaller program when -O is not used.
But this causes such a terrible slowdown sometimes
that it seems better to use synth_mult always. */
if (GET_CODE (const_op1) == CONST_INT)
if (const_op1 && GET_CODE (const_op1) == CONST_INT)
{
struct algorithm alg;
struct algorithm alg2;
@ -2087,13 +2101,20 @@ expand_mult (mode, op0, op1, target, unsignedp)
mult_cost = MIN (12 * add_cost, mult_cost);
synth_mult (&alg, val, mult_cost);
synth_mult (&alg2, - val,
(alg.cost < mult_cost ? alg.cost : mult_cost) - negate_cost);
if (alg2.cost + negate_cost < alg.cost)
alg = alg2, variant = negate_variant;
/* This works only if the inverted value actually fits in an
`unsigned int' */
if (HOST_BITS_PER_INT >= GET_MODE_BITSIZE (mode))
{
synth_mult (&alg2, - val,
(alg.cost < mult_cost ? alg.cost : mult_cost) - negate_cost);
if (alg2.cost + negate_cost < alg.cost)
alg = alg2, variant = negate_variant;
}
/* This proves very useful for division-by-constant. */
synth_mult (&alg2, val - 1, (alg.cost < mult_cost ? alg.cost : mult_cost) - add_cost);
synth_mult (&alg2, val - 1,
(alg.cost < mult_cost ? alg.cost : mult_cost) - add_cost);
if (alg2.cost + add_cost < alg.cost)
alg = alg2, variant = add_variant;
@ -2131,7 +2152,9 @@ expand_mult (mode, op0, op1, target, unsignedp)
int log = alg.log[opno];
int preserve = preserve_subexpressions_p ();
rtx shift_subtarget = preserve ? 0 : accum;
rtx add_target = opno == alg.ops - 1 && target != 0 ? target : 0;
rtx add_target
= (opno == alg.ops - 1 && target != 0 && variant != add_variant
? target : 0);
rtx accum_target = preserve ? 0 : accum;
switch (alg.op[opno])
@ -2568,7 +2591,7 @@ expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
rtx quotient = 0, remainder = 0;
rtx last;
int size;
rtx insn;
rtx insn, set;
optab optab1, optab2;
int op1_is_constant, op1_is_pow2;
@ -2813,10 +2836,13 @@ expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
}
insn = get_last_insn ();
REG_NOTES (insn)
= gen_rtx (EXPR_LIST, REG_EQUAL,
gen_rtx (UDIV, compute_mode, op0, op1),
REG_NOTES (insn));
if (insn != last
&& (set = single_set (insn)) != 0
&& SET_DEST (set) == quotient)
REG_NOTES (insn)
= gen_rtx (EXPR_LIST, REG_EQUAL,
gen_rtx (UDIV, compute_mode, op0, op1),
REG_NOTES (insn));
}
else /* TRUNC_DIV, signed */
{
@ -2878,11 +2904,14 @@ expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
if (d < 0)
{
insn = get_last_insn ();
REG_NOTES (insn)
= gen_rtx (EXPR_LIST, REG_EQUAL,
gen_rtx (DIV, compute_mode, op0,
GEN_INT (abs_d)),
REG_NOTES (insn));
if (insn != last
&& (set = single_set (insn)) != 0
&& SET_DEST (set) == quotient)
REG_NOTES (insn)
= gen_rtx (EXPR_LIST, REG_EQUAL,
gen_rtx (DIV, compute_mode, op0,
GEN_INT (abs_d)),
REG_NOTES (insn));
quotient = expand_unop (compute_mode, neg_optab,
quotient, quotient, 0);
@ -2935,14 +2964,14 @@ expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
}
}
if (quotient != 0)
{
insn = get_last_insn ();
REG_NOTES (insn)
= gen_rtx (EXPR_LIST, REG_EQUAL,
gen_rtx (DIV, compute_mode, op0, op1),
REG_NOTES (insn));
}
insn = get_last_insn ();
if (insn != last
&& (set = single_set (insn)) != 0
&& SET_DEST (set) == quotient)
REG_NOTES (insn)
= gen_rtx (EXPR_LIST, REG_EQUAL,
gen_rtx (DIV, compute_mode, op0, op1),
REG_NOTES (insn));
}
break;
}
@ -3218,6 +3247,44 @@ expand_divmod (rem_flag, code, mode, op0, op1, target, unsignedp)
}
else /* signed */
{
if (op1_is_constant && EXACT_POWER_OF_2_OR_ZERO_P (INTVAL (op1))
&& INTVAL (op1) >= 0)
{
/* This is extremely similar to the code for the unsigned case
above. For 2.7 we should merge these variants, but for
2.6.1 I don't want to touch the code for unsigned since that
get used in C. The signed case will only be used by other
languages (Ada). */
rtx t1, t2, t3;
unsigned HOST_WIDE_INT d = INTVAL (op1);
t1 = expand_shift (RSHIFT_EXPR, compute_mode, op0,
build_int_2 (floor_log2 (d), 0),
tquotient, 0);
t2 = expand_binop (compute_mode, and_optab, op0,
GEN_INT (d - 1),
NULL_RTX, 1, OPTAB_LIB_WIDEN);
t3 = gen_reg_rtx (compute_mode);
t3 = emit_store_flag (t3, NE, t2, const0_rtx,
compute_mode, 1, 1);
if (t3 == 0)
{
rtx lab;
lab = gen_label_rtx ();
emit_cmp_insn (t2, const0_rtx, EQ, NULL_RTX,
compute_mode, 0, 0);
emit_jump_insn (gen_beq (lab));
expand_inc (t1, const1_rtx);
emit_label (lab);
quotient = t1;
}
else
quotient = force_operand (gen_rtx (PLUS, compute_mode,
t1, t3),
tquotient);
break;
}
/* Try using an instruction that produces both the quotient and
remainder, using truncation. We can easily compensate the
quotient or remainder to get ceiling rounding, once we have the

View File

@ -1025,6 +1025,41 @@ convert_move (to, from, unsignedp)
}
}
if (to_mode == PDImode)
{
if (from_mode != DImode)
from = convert_to_mode (DImode, from, unsignedp);
#ifdef HAVE_truncdipdi2
if (HAVE_truncdipdi2)
{
emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
return;
}
#endif /* HAVE_truncdipdi2 */
abort ();
}
if (from_mode == PDImode)
{
if (to_mode != DImode)
{
from = convert_to_mode (DImode, from, unsignedp);
from_mode = DImode;
}
else
{
#ifdef HAVE_extendpdidi2
if (HAVE_extendpdidi2)
{
emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
return;
}
#endif /* HAVE_extendpdidi2 */
abort ();
}
}
/* Now follow all the conversions between integers
no more than a word long. */
@ -1164,6 +1199,58 @@ convert_move (to, from, unsignedp)
return;
}
if (from_mode == TImode && to_mode == DImode)
{
#ifdef HAVE_trunctidi2
if (HAVE_trunctidi2)
{
emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
return;
}
#endif
convert_move (to, force_reg (from_mode, from), unsignedp);
return;
}
if (from_mode == TImode && to_mode == SImode)
{
#ifdef HAVE_trunctisi2
if (HAVE_trunctisi2)
{
emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
return;
}
#endif
convert_move (to, force_reg (from_mode, from), unsignedp);
return;
}
if (from_mode == TImode && to_mode == HImode)
{
#ifdef HAVE_trunctihi2
if (HAVE_trunctihi2)
{
emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
return;
}
#endif
convert_move (to, force_reg (from_mode, from), unsignedp);
return;
}
if (from_mode == TImode && to_mode == QImode)
{
#ifdef HAVE_trunctiqi2
if (HAVE_trunctiqi2)
{
emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
return;
}
#endif
convert_move (to, force_reg (from_mode, from), unsignedp);
return;
}
/* Handle truncation of volatile memrefs, and so on;
the things that couldn't be truncated directly,
and for which there was no special instruction. */
@ -1603,6 +1690,9 @@ move_block_to_reg (regno, x, nregs, mode)
int i;
rtx pat, last;
if (nregs == 0)
return;
if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
x = validize_mem (force_const_mem (mode, x));
@ -1823,8 +1913,6 @@ emit_move_insn_1 (x, y)
int stack = push_operand (x, GET_MODE (x));
rtx insns;
start_sequence ();
/* If this is a stack, push the highpart first, so it
will be in the argument order.
@ -1858,17 +1946,6 @@ emit_move_insn_1 (x, y)
(gen_imagpart (submode, x), gen_imagpart (submode, y)));
}
insns = get_insns ();
end_sequence ();
/* If X is a CONCAT, we got insns like RD = RS, ID = IS,
each with a separate pseudo as destination.
It's not correct for flow to treat them as a unit. */
if (GET_CODE (x) != CONCAT)
emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);
else
emit_insns (insns);
return get_last_insn ();
}
@ -1880,8 +1957,6 @@ emit_move_insn_1 (x, y)
rtx last_insn = 0;
rtx insns;
start_sequence ();
for (i = 0;
i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
i++)
@ -1906,10 +1981,6 @@ emit_move_insn_1 (x, y)
last_insn = emit_move_insn (xpart, ypart);
}
insns = get_insns ();
end_sequence ();
emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);
return last_insn;
}
else
@ -2724,6 +2795,7 @@ store_expr (exp, target, want_value)
The string constant may be shorter than the array.
So copy just the string's actual length, and clear the rest. */
rtx size;
rtx addr;
/* Get the size of the data type of the string,
which is actually the size of the target. */
@ -2752,17 +2824,16 @@ store_expr (exp, target, want_value)
that we have to clear. */
if (GET_CODE (copy_size_rtx) == CONST_INT)
{
temp = plus_constant (XEXP (target, 0),
addr = plus_constant (XEXP (target, 0),
TREE_STRING_LENGTH (exp));
size = plus_constant (size,
- TREE_STRING_LENGTH (exp));
size = plus_constant (size, - TREE_STRING_LENGTH (exp));
}
else
{
enum machine_mode size_mode = Pmode;
temp = force_reg (Pmode, XEXP (target, 0));
temp = expand_binop (size_mode, add_optab, temp,
addr = force_reg (Pmode, XEXP (target, 0));
addr = expand_binop (size_mode, add_optab, addr,
copy_size_rtx, NULL_RTX, 0,
OPTAB_LIB_WIDEN);
@ -2779,13 +2850,14 @@ store_expr (exp, target, want_value)
if (size != const0_rtx)
{
#ifdef TARGET_MEM_FUNCTIONS
emit_library_call (memset_libfunc, 0, VOIDmode, 3,
temp, Pmode, const0_rtx, Pmode, size, Pmode);
emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
Pmode, const0_rtx, Pmode, size, Pmode);
#else
emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
temp, Pmode, size, Pmode);
addr, Pmode, size, Pmode);
#endif
}
if (label)
emit_label (label);
}
@ -3507,7 +3579,13 @@ safe_from_p (x, exp)
rtx exp_rtl = 0;
int i, nops;
if (x == 0)
if (x == 0
/* If EXP has varying size, we MUST use a target since we currently
have no way of allocating temporaries of variable size. So we
assume here that something at a higher level has prevented a
clash. This is somewhat bogus, but the best we can do. */
|| (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST))
return 1;
/* If this is a subreg of a hard register, declare it unsafe, otherwise,
@ -4340,13 +4418,16 @@ expand_expr (exp, target, tmode, modifier)
}
/* Fold an expression like: "foo"[2].
This is not done in fold so it won't happen inside &. */
This is not done in fold so it won't happen inside &.
Don't fold if this is for wide characters since it's too
difficult to do correctly and this is a very rare case. */
if (TREE_CODE (array) == STRING_CST
&& TREE_CODE (index) == INTEGER_CST
&& !TREE_INT_CST_HIGH (index)
&& (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
&& GET_MODE_CLASS (mode) == MODE_INT)
&& GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_SIZE (mode) == 1)
return GEN_INT (TREE_STRING_POINTER (array)[i]);
/* If this is a constant index into a constant array,
@ -4404,9 +4485,12 @@ expand_expr (exp, target, tmode, modifier)
case COMPONENT_REF:
case BIT_FIELD_REF:
/* If the operand is a CONSTRUCTOR, we can just extract the
appropriate field if it is present. */
appropriate field if it is present. Don't do this if we have
already written the data since we want to refer to that copy
and varasm.c assumes that's what we'll do. */
if (code != ARRAY_REF
&& TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
&& TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
&& TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
{
tree elt;
@ -5450,7 +5534,9 @@ expand_expr (exp, target, tmode, modifier)
temp = 0;
else if (original_target
&& safe_from_p (original_target, TREE_OPERAND (exp, 0))
&& GET_MODE (original_target) == mode)
&& GET_MODE (original_target) == mode
&& ! (GET_CODE (original_target) == MEM
&& MEM_VOLATILE_P (original_target)))
temp = original_target;
else if (mode == BLKmode)
{
@ -5498,8 +5584,7 @@ expand_expr (exp, target, tmode, modifier)
&& (TREE_CODE (binary_op) == PLUS_EXPR
|| TREE_CODE (binary_op) == MINUS_EXPR
|| TREE_CODE (binary_op) == BIT_IOR_EXPR
|| TREE_CODE (binary_op) == BIT_XOR_EXPR
|| TREE_CODE (binary_op) == BIT_AND_EXPR)
|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
&& integer_onep (TREE_OPERAND (binary_op, 1))
&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
{
@ -5507,8 +5592,7 @@ expand_expr (exp, target, tmode, modifier)
optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
: TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
: TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
: TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
: and_optab);
: xor_optab);
/* If we had X ? A : A + 1, do this as A + (X == 0).
@ -5732,7 +5816,8 @@ expand_expr (exp, target, tmode, modifier)
left_cleanups = integer_zero_node;
if (! right_cleanups)
right_cleanups = integer_zero_node;
new_cleanups = build (COND_EXPR, void_type_node, cond,
new_cleanups = build (COND_EXPR, void_type_node,
truthvalue_conversion (cond),
left_cleanups, right_cleanups);
new_cleanups = fold (new_cleanups);
@ -5969,7 +6054,10 @@ expand_expr (exp, target, tmode, modifier)
op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
op0);
else if (GET_CODE (op0) == MEM)
temp = XEXP (op0, 0);
{
mark_temp_addr_taken (op0);
temp = XEXP (op0, 0);
}
else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
|| GET_CODE (op0) == CONCAT)
@ -5982,6 +6070,7 @@ expand_expr (exp, target, tmode, modifier)
= assign_stack_temp (inner_mode,
int_size_in_bytes (inner_type), 1);
mark_temp_addr_taken (memloc);
emit_move_insn (memloc, op0);
op0 = memloc;
}
@ -6055,6 +6144,7 @@ expand_expr (exp, target, tmode, modifier)
case CONJ_EXPR:
{
enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
rtx imag_t;
rtx insns;
@ -6066,11 +6156,12 @@ expand_expr (exp, target, tmode, modifier)
start_sequence ();
/* Store the realpart and the negated imagpart to target. */
emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
emit_move_insn (gen_realpart (partmode, target),
gen_realpart (partmode, op0));
imag_t = gen_imagpart (mode, target);
temp = expand_unop (mode, neg_optab,
gen_imagpart (mode, op0), imag_t, 0);
imag_t = gen_imagpart (partmode, target);
temp = expand_unop (partmode, neg_optab,
gen_imagpart (partmode, op0), imag_t, 0);
if (temp != imag_t)
emit_move_insn (imag_t, temp);
@ -6836,6 +6927,9 @@ expand_builtin (exp, target, subtarget, mode, ignore)
case BUILT_IN_SIN:
case BUILT_IN_COS:
/* Treat these like sqrt, but only if the user asks for them. */
if (! flag_fast_math)
break;
case BUILT_IN_FSQRT:
/* If not optimizing, call the library function. */
if (! optimize)
@ -7792,7 +7886,7 @@ result_vector (savep, result)
align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
if (size % align != 0)
size = CEIL (size, align) * align;
reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
mem = change_address (result, mode,
plus_constant (XEXP (result, 0), size));
savevec[nelts++] = (savep
@ -8284,7 +8378,9 @@ preexpand_calls (exp)
/* Do nothing to built-in functions. */
if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
|| TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
|| ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
|| ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
/* Do nothing if the call returns a variable-sized object. */
|| TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST)
CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
return;
@ -8371,6 +8467,7 @@ defer_cleanups_to (old_cleanups)
while (cleanups_this_call != old_cleanups)
{
(*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
last = cleanups_this_call;
cleanups_this_call = TREE_CHAIN (cleanups_this_call);
}
@ -8555,17 +8652,117 @@ do_jump (exp, if_false_label, if_true_label)
break;
case TRUTH_ANDIF_EXPR:
if (if_false_label == 0)
if_false_label = drop_through_label = gen_label_rtx ();
do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
{
rtx seq1, seq2;
tree cleanups, old_cleanups;
if (if_false_label == 0)
if_false_label = drop_through_label = gen_label_rtx ();
start_sequence ();
do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
seq1 = get_insns ();
end_sequence ();
old_cleanups = cleanups_this_call;
start_sequence ();
do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
seq2 = get_insns ();
end_sequence ();
cleanups = defer_cleanups_to (old_cleanups);
if (cleanups)
{
rtx flag = gen_reg_rtx (word_mode);
tree new_cleanups;
tree cond;
/* Flag cleanups as not needed. */
emit_move_insn (flag, const0_rtx);
emit_insns (seq1);
/* Flag cleanups as needed. */
emit_move_insn (flag, const1_rtx);
emit_insns (seq2);
/* convert flag, which is an rtx, into a tree. */
cond = make_node (RTL_EXPR);
TREE_TYPE (cond) = integer_type_node;
RTL_EXPR_RTL (cond) = flag;
RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
new_cleanups = build (COND_EXPR, void_type_node,
truthvalue_conversion (cond),
cleanups, integer_zero_node);
new_cleanups = fold (new_cleanups);
/* Now add in the conditionalized cleanups. */
cleanups_this_call
= tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
(*interim_eh_hook) (NULL_TREE);
}
else
{
emit_insns (seq1);
emit_insns (seq2);
}
}
break;
case TRUTH_ORIF_EXPR:
if (if_true_label == 0)
if_true_label = drop_through_label = gen_label_rtx ();
do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
{
rtx seq1, seq2;
tree cleanups, old_cleanups;
if (if_true_label == 0)
if_true_label = drop_through_label = gen_label_rtx ();
start_sequence ();
do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
seq1 = get_insns ();
end_sequence ();
old_cleanups = cleanups_this_call;
start_sequence ();
do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
seq2 = get_insns ();
end_sequence ();
cleanups = defer_cleanups_to (old_cleanups);
if (cleanups)
{
rtx flag = gen_reg_rtx (word_mode);
tree new_cleanups;
tree cond;
/* Flag cleanups as not needed. */
emit_move_insn (flag, const0_rtx);
emit_insns (seq1);
/* Flag cleanups as needed. */
emit_move_insn (flag, const1_rtx);
emit_insns (seq2);
/* convert flag, which is an rtx, into a tree. */
cond = make_node (RTL_EXPR);
TREE_TYPE (cond) = integer_type_node;
RTL_EXPR_RTL (cond) = flag;
RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
new_cleanups = build (COND_EXPR, void_type_node,
truthvalue_conversion (cond),
cleanups, integer_zero_node);
new_cleanups = fold (new_cleanups);
/* Now add in the conditionalized cleanups. */
cleanups_this_call
= tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
(*interim_eh_hook) (NULL_TREE);
}
else
{
emit_insns (seq1);
emit_insns (seq2);
}
}
break;
case COMPOUND_EXPR:
@ -8980,9 +9177,6 @@ do_jump_for_compare (comparison, if_false_label, if_true_label)
rtx prev = get_last_insn ();
rtx branch = 0;
if (prev != 0)
prev = PREV_INSN (prev);
/* Output the branch with the opposite condition. Then try to invert
what is generated. If more than one insn is a branch, or if the
branch is not the last insn written, abort. If we can't invert
@ -8990,20 +9184,23 @@ do_jump_for_compare (comparison, if_false_label, if_true_label)
emit a jump to the false label and define the true label. */
if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
else
abort ();
/* Here we get the insn before what was just emitted.
On some machines, emitting the branch can discard
the previous compare insn and emit a replacement. */
/* Here we get the first insn that was just emitted. It used to be the
case that, on some machines, emitting the branch would discard
the previous compare insn and emit a replacement. This isn't
done anymore, but abort if we see that PREV is deleted. */
if (prev == 0)
/* If there's only one preceding insn... */
insn = get_insns ();
else if (INSN_DELETED_P (prev))
abort ();
else
insn = NEXT_INSN (prev);
for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
for (; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == JUMP_INSN)
{
if (branch)

View File

@ -824,9 +824,13 @@ asm_insn_count (body)
char *template;
int count = 1;
for (template = decode_asm_operands (body, NULL_PTR, NULL_PTR,
NULL_PTR, NULL_PTR);
*template; template++)
if (GET_CODE (body) == ASM_INPUT)
template = XSTR (body, 0);
else
template = decode_asm_operands (body, NULL_PTR, NULL_PTR,
NULL_PTR, NULL_PTR);
for ( ; *template; template++)
if (IS_ASM_LOGICAL_LINE_SEPARATOR(*template) || *template == '\n')
count++;
@ -953,18 +957,14 @@ static void
profile_function (file)
FILE *file;
{
#ifndef NO_PROFILE_DATA
int align = MIN (BIGGEST_ALIGNMENT, POINTER_SIZE);
#endif /* not NO_PROFILE_DATA */
int sval = current_function_returns_struct;
int cxt = current_function_needs_context;
#ifndef NO_PROFILE_DATA
data_section ();
ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
ASM_OUTPUT_INTERNAL_LABEL (file, "LP", profile_label_no);
assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
#endif /* not NO_PROFILE_DATA */
text_section ();

View File

@ -177,6 +177,12 @@ int *reg_basic_block;
int *reg_n_refs;
/* Indexed by N; says whether a psuedo register N was ever used
within a SUBREG that changes the size of the reg. Some machines prohibit
such objects to be in certain (usually floating-point) registers. */
char *reg_changes_size;
/* Indexed by N, gives number of places register N dies.
This information remains valid for the rest of the compilation
of the current function; it is used to control register allocation. */
@ -602,6 +608,20 @@ find_basic_blocks (f, nonlocal_label_list)
}
}
/* ??? See if we have a "live" basic block that is not reachable.
This can happen if it is headed by a label that is preserved or
in one of the label lists, but no call or computed jump is in
the loop. It's not clear if we can delete the block or not,
but don't for now. However, we will mess up register status if
it remains unreachable, so add a fake reachability from the
previous block. */
for (i = 1; i < n_basic_blocks; i++)
if (block_live[i] && ! basic_block_drops_in[i]
&& GET_CODE (basic_block_head[i]) == CODE_LABEL
&& LABEL_REFS (basic_block_head[i]) == basic_block_head[i])
basic_block_drops_in[i] = 1;
/* Now delete the code for any basic blocks that can't be reached.
They can occur because jump_optimize does not recognize
unreachable loops as unreachable. */
@ -1055,18 +1075,18 @@ life_analysis (f, nregs)
{
register rtx jump, head;
/* Update the basic_block_new_live_at_end's of the block
that falls through into this one (if any). */
head = basic_block_head[i];
jump = PREV_INSN (head);
if (basic_block_drops_in[i])
{
register int from_block = BLOCK_NUM (jump);
register int j;
for (j = 0; j < regset_size; j++)
basic_block_new_live_at_end[from_block][j]
basic_block_new_live_at_end[i-1][j]
|= basic_block_live_at_start[i][j];
}
/* Update the basic_block_new_live_at_end's of
all the blocks that jump to this one. */
if (GET_CODE (head) == CODE_LABEL)
@ -1183,6 +1203,9 @@ allocate_for_life_analysis ()
reg_n_deaths = (short *) oballoc (max_regno * sizeof (short));
bzero ((char *) reg_n_deaths, max_regno * sizeof (short));
reg_changes_size = (char *) oballoc (max_regno * sizeof (char));
bzero (reg_changes_size, max_regno * sizeof (char));;
reg_live_length = (int *) oballoc (max_regno * sizeof (int));
bzero ((char *) reg_live_length, max_regno * sizeof (int));
@ -1512,11 +1535,11 @@ propagate_block (old, first, last, final, significant, bnum)
/* Calls may also reference any of the global registers,
so they are made live. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
live[i / REGSET_ELT_BITS]
|= ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS));
mark_used_regs (old, live,
gen_rtx (REG, reg_raw_mode[i], i),
final, insn);
/* Calls also clobber memory. */
last_mem_set = 0;
@ -2081,11 +2104,21 @@ find_auto_inc (needed, x, insn)
&& (use = find_use_as_address (PATTERN (insn), addr, offset),
use != 0 && use != (rtx) 1))
{
int win = 0;
rtx q = SET_DEST (set);
enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
? (offset ? PRE_INC : POST_INC)
: (offset ? PRE_DEC : POST_DEC));
if (dead_or_set_p (incr, addr))
win = 1;
{
/* This is the simple case. Try to make the auto-inc. If
we can't, we are done. Otherwise, we will do any
needed updates below. */
if (! validate_change (insn, &XEXP (x, 0),
gen_rtx (inc_code, Pmode, addr),
0))
return;
}
else if (GET_CODE (q) == REG
/* PREV_INSN used here to check the semi-open interval
[insn,incr). */
@ -2113,14 +2146,25 @@ find_auto_inc (needed, x, insn)
BLOCK_NUM (temp) = BLOCK_NUM (insn);
}
/* If we can't make the auto-inc, or can't make the
replacement into Y, exit. There's no point in making
the change below if we can't do the auto-inc and doing
so is not correct in the pre-inc case. */
validate_change (insn, &XEXP (x, 0),
gen_rtx (inc_code, Pmode, q),
1);
validate_change (incr, &XEXP (y, 0), q, 1);
if (! apply_change_group ())
return;
/* We now know we'll be doing this change, so emit the
new insn(s) and do the updates. */
emit_insns_before (insns, insn);
if (basic_block_head[BLOCK_NUM (insn)] == insn)
basic_block_head[BLOCK_NUM (insn)] = insns;
XEXP (x, 0) = q;
XEXP (y, 0) = q;
/* INCR will become a NOTE and INSN won't contain a
use of ADDR. If a use of ADDR was just placed in
the insn before INSN, make that the next use.
@ -2134,7 +2178,6 @@ find_auto_inc (needed, x, insn)
addr = q;
regno = REGNO (q);
win = 1;
/* REGNO is now used in INCR which is below INSN, but
it previously wasn't live here. If we don't mark
@ -2150,46 +2193,38 @@ find_auto_inc (needed, x, insn)
reg_n_calls_crossed[regno]++;
}
if (win
/* If we have found a suitable auto-increment, do
POST_INC around the register here, and patch out the
increment instruction that follows. */
&& validate_change (insn, &XEXP (x, 0),
gen_rtx ((INTVAL (XEXP (y, 1)) == size
? (offset ? PRE_INC : POST_INC)
: (offset ? PRE_DEC : POST_DEC)),
Pmode, addr), 0))
/* If we haven't returned, it means we were able to make the
auto-inc, so update the status. First, record that this insn
has an implicit side effect. */
REG_NOTES (insn)
= gen_rtx (EXPR_LIST, REG_INC, addr, REG_NOTES (insn));
/* Modify the old increment-insn to simply copy
the already-incremented value of our register. */
if (! validate_change (incr, &SET_SRC (set), addr, 0))
abort ();
/* If that makes it a no-op (copying the register into itself) delete
it so it won't appear to be a "use" and a "set" of this
register. */
if (SET_DEST (set) == addr)
{
/* Record that this insn has an implicit side effect. */
REG_NOTES (insn)
= gen_rtx (EXPR_LIST, REG_INC, addr, REG_NOTES (insn));
PUT_CODE (incr, NOTE);
NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
NOTE_SOURCE_FILE (incr) = 0;
}
/* Modify the old increment-insn to simply copy
the already-incremented value of our register. */
SET_SRC (set) = addr;
/* Indicate insn must be re-recognized. */
INSN_CODE (incr) = -1;
if (regno >= FIRST_PSEUDO_REGISTER)
{
/* Count an extra reference to the reg. When a reg is
incremented, spilling it is worse, so we want to make
that less likely. */
reg_n_refs[regno] += loop_depth;
/* If that makes it a no-op (copying the register into itself)
then delete it so it won't appear to be a "use" and a "set"
of this register. */
if (SET_DEST (set) == addr)
{
PUT_CODE (incr, NOTE);
NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
NOTE_SOURCE_FILE (incr) = 0;
}
if (regno >= FIRST_PSEUDO_REGISTER)
{
/* Count an extra reference to the reg. When a reg is
incremented, spilling it is worse, so we want to make
that less likely. */
reg_n_refs[regno] += loop_depth;
/* Count the increment as a setting of the register,
even though it isn't a SET in rtl. */
reg_n_sets[regno]++;
}
/* Count the increment as a setting of the register,
even though it isn't a SET in rtl. */
reg_n_sets[regno]++;
}
}
}
@ -2257,6 +2292,20 @@ mark_used_regs (needed, live, x, final, insn)
#endif
break;
case SUBREG:
if (GET_CODE (SUBREG_REG (x)) == REG
&& REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
&& (GET_MODE_SIZE (GET_MODE (x))
!= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
&& (INTEGRAL_MODE_P (GET_MODE (x))
|| INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (x)))))
reg_changes_size[REGNO (SUBREG_REG (x))] = 1;
/* While we're here, optimize this case. */
x = SUBREG_REG (x);
/* ... fall through ... */
case REG:
/* See a register other than being set
=> mark it as needed. */
@ -2368,6 +2417,16 @@ mark_used_regs (needed, live, x, final, insn)
#endif
)
{
/* Check for the case where the register dying partially
overlaps the register set by this insn. */
if (regno < FIRST_PSEUDO_REGISTER
&& HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
{
int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
while (--n >= 0)
some_needed |= dead_or_set_regno_p (insn, regno + n);
}
/* If none of the words in X is needed, make a REG_DEAD
note. Otherwise, we must make partial REG_DEAD notes. */
if (! some_needed)

View File

@ -2836,12 +2836,14 @@ fold_truthop (code, truth_type, lhs, rhs)
l_const = convert (unsigned_type (TREE_TYPE (l_const)), l_const);
l_const = const_binop (LSHIFT_EXPR, convert (type, l_const),
size_int (xll_bitpos), 0);
l_const = const_binop (BIT_AND_EXPR, l_const, ll_mask, 0);
}
if (r_const)
{
r_const = convert (unsigned_type (TREE_TYPE (r_const)), r_const);
r_const = const_binop (LSHIFT_EXPR, convert (type, r_const),
size_int (xrl_bitpos), 0);
r_const = const_binop (BIT_AND_EXPR, r_const, rl_mask, 0);
}
/* If the right sides are not constant, do the same for it. Also,
@ -3420,6 +3422,15 @@ fold (expr)
return t;
#endif /* 0 */
case COMPONENT_REF:
if (TREE_CODE (arg0) == CONSTRUCTOR)
{
tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
if (m)
t = TREE_VALUE (m);
}
return t;
case RANGE_EXPR:
TREE_CONSTANT (t) = wins;
return t;
@ -3733,8 +3744,8 @@ fold (expr)
Also note that operand_equal_p is always false if an operand
is volatile. */
if (operand_equal_p (arg0, arg1,
FLOAT_TYPE_P (type) && ! flag_fast_math))
if ((! FLOAT_TYPE_P (type) || flag_fast_math)
&& operand_equal_p (arg0, arg1, 0))
return convert (type, integer_zero_node);
goto associate;

View File

@ -152,9 +152,8 @@ int current_function_args_size;
int current_function_pretend_args_size;
/* # of bytes of outgoing arguments required to be pushed by the prologue.
If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
and no stack adjusts will be done on function calls. */
/* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
defined, the needed space is pushed by the prologue. */
int current_function_outgoing_args_size;
@ -356,6 +355,8 @@ struct temp_slot
tree rtl_expr;
/* Non-zero if this temporary is currently in use. */
char in_use;
/* Non-zero if this temporary has its address taken. */
char addr_taken;
/* Nesting level at which this slot is being used. */
int level;
/* Non-zero if this should survive a call to free_temp_slots. */
@ -461,7 +462,8 @@ find_function_data (decl)
since this function knows only about language-independent variables. */
void
push_function_context ()
push_function_context_to (toplevel)
int toplevel;
{
struct function *p = (struct function *) xmalloc (sizeof (struct function));
@ -512,7 +514,7 @@ push_function_context ()
p->fixup_var_refs_queue = 0;
p->epilogue_delay_list = current_function_epilogue_delay_list;
save_tree_status (p);
save_tree_status (p, toplevel);
save_storage_status (p);
save_emit_status (p);
init_emit ();
@ -524,11 +526,18 @@ push_function_context ()
(*save_machine_status) (p);
}
void
push_function_context ()
{
push_function_context_to (0);
}
/* Restore the last saved context, at the end of a nested function.
This function is called from language-specific code. */
void
pop_function_context ()
pop_function_context_from (toplevel)
int toplevel;
{
struct function *p = outer_function_chain;
@ -545,7 +554,8 @@ pop_function_context ()
current_function_calls_alloca = p->calls_alloca;
current_function_has_nonlocal_label = p->has_nonlocal_label;
current_function_has_nonlocal_goto = p->has_nonlocal_goto;
current_function_contains_functions = 1;
if (! toplevel)
current_function_contains_functions = 1;
current_function_args_size = p->args_size;
current_function_pretend_args_size = p->pretend_args_size;
current_function_arg_offset_rtx = p->arg_offset_rtx;
@ -577,8 +587,9 @@ pop_function_context ()
temp_slots = p->temp_slots;
temp_slot_level = p->temp_slot_level;
current_function_epilogue_delay_list = p->epilogue_delay_list;
reg_renumber = 0;
restore_tree_status (p);
restore_tree_status (p, toplevel);
restore_storage_status (p);
restore_expr_status (p);
restore_emit_status (p);
@ -602,6 +613,11 @@ pop_function_context ()
rtx_equal_function_value_matters = 1;
virtuals_instantiated = 0;
}
void pop_function_context ()
{
pop_function_context_from (0);
}
/* Allocate fixed slots in the stack frame of the current function. */
@ -784,6 +800,11 @@ assign_stack_temp (mode, size, keep)
{
struct temp_slot *p, *best_p = 0;
/* If SIZE is -1 it means that somebody tried to allocate a temporary
of a variable size. */
if (size == -1)
abort ();
/* First try to find an available, already-allocated temporary that is the
exact size we require. */
for (p = temp_slots; p; p = p->next)
@ -812,7 +833,7 @@ assign_stack_temp (mode, size, keep)
if (best_p->size - rounded_size >= alignment)
{
p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
p->in_use = 0;
p->in_use = p->addr_taken = 0;
p->size = best_p->size - rounded_size;
p->slot = gen_rtx (MEM, BLKmode,
plus_constant (XEXP (best_p->slot, 0),
@ -845,7 +866,9 @@ assign_stack_temp (mode, size, keep)
}
p->in_use = 1;
p->addr_taken = 0;
p->rtl_expr = sequence_rtl_expr;
if (keep == 2)
{
p->level = target_temp_slot_level;
@ -969,6 +992,28 @@ update_temp_slot_address (old, new)
}
}
/* If X could be a reference to a temporary slot, mark the fact that its
adddress was taken. */
void
mark_temp_addr_taken (x)
rtx x;
{
struct temp_slot *p;
if (x == 0)
return;
/* If X is not in memory or is at a constant address, it cannot be in
a temporary slot. */
if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
return;
p = find_temp_slot_from_address (XEXP (x, 0));
if (p != 0)
p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as belonging
to the to one level higher. If X matched one of our slots, just mark that
one. Otherwise, we can't easily predict which it is, so upgrade all of
@ -981,31 +1026,52 @@ void
preserve_temp_slots (x)
rtx x;
{
struct temp_slot *p;
struct temp_slot *p = 0;
/* If there is no result, we still might have some objects whose address
were taken, so we need to make sure they stay around. */
if (x == 0)
return;
{
for (p = temp_slots; p; p = p->next)
if (p->in_use && p->level == temp_slot_level && p->addr_taken)
p->level--;
return;
}
/* If X is a register that is being used as a pointer, see if we have
a temporary slot we know it points to. To be consistent with
the code below, we really should preserve all non-kept slots
if we can't find a match, but that seems to be much too costly. */
if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x))
&& (p = find_temp_slot_from_address (x)) != 0)
if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
p = find_temp_slot_from_address (x);
/* If X is not in memory or is at a constant address, it cannot be in
a temporary slot, but it can contain something whose address was
taken. */
if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
{
p->level--;
for (p = temp_slots; p; p = p->next)
if (p->in_use && p->level == temp_slot_level && p->addr_taken)
p->level--;
return;
}
/* If X is not in memory or is at a constant address, it cannot be in
a temporary slot. */
if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
return;
/* First see if we can find a match. */
p = find_temp_slot_from_address (XEXP (x, 0));
if (p == 0)
p = find_temp_slot_from_address (XEXP (x, 0));
if (p != 0)
{
/* Move everything at our level whose address was taken to our new
level in case we used its address. */
struct temp_slot *q;
for (q = temp_slots; q; q = q->next)
if (q != p && q->addr_taken && q->level == p->level)
q->level--;
p->level--;
return;
}
@ -2191,6 +2257,8 @@ optimize_bit_field (body, insn, equiv_mem)
and then for which byte of the word is wanted. */
register int offset = INTVAL (XEXP (bitfield, 2));
rtx insns;
/* Adjust OFFSET to count bits from low-address byte. */
#if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
@ -2209,8 +2277,12 @@ optimize_bit_field (body, insn, equiv_mem)
#endif
}
memref = change_address (memref, mode,
start_sequence ();
memref = change_address (memref, mode,
plus_constant (XEXP (memref, 0), offset));
insns = get_insns ();
end_sequence ();
emit_insns_before (insns, insn);
/* Store this memory reference where
we found the bit field reference. */
@ -3150,6 +3222,13 @@ assign_parms (fndecl, second_time)
continue;
}
/* If the parm is to be passed as a transparent union, use the
type of the first field for the tests below. We have already
verified that the modes are the same. */
if (DECL_TRANSPARENT_UNION (parm)
|| TYPE_TRANSPARENT_UNION (passed_type))
passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
/* See if this arg was passed by invisible reference. It is if
it is an object whose size depends on the contents of the
object itself or if the machine requires these objects be passed
@ -3426,6 +3505,9 @@ assign_parms (fndecl, second_time)
else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
abort ();
if (TREE_READONLY (parm))
RTX_UNCHANGING_P (stack_parm) = 1;
move_block_from_reg (REGNO (entry_parm),
validize_mem (stack_parm),
size_stored / UNITS_PER_WORD,
@ -4251,7 +4333,9 @@ trampoline_address (function)
/* Find an existing trampoline and return it. */
for (link = trampoline_list; link; link = TREE_CHAIN (link))
if (TREE_PURPOSE (link) == function)
return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
return
round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
for (fp = outer_function_chain; fp; fp = fp->next)
for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
if (TREE_PURPOSE (link) == function)
@ -4693,6 +4777,11 @@ mark_varargs ()
/* Expand a call to __main at the beginning of a possible main function. */
#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif
void
expand_main_function ()
{
@ -4700,10 +4789,10 @@ expand_main_function ()
{
/* The zero below avoids a possible parse error */
0;
#if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
#if !defined (HAS_INIT_SECTION)
emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
VOIDmode, 0);
#endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
#endif /* not HAS_INIT_SECTION */
}
}
@ -4858,7 +4947,7 @@ expand_function_start (subr, parms_have_cleanups)
if (aggregate_value_p (DECL_RESULT (subr)))
{
/* Returning something that won't go in a register. */
register rtx value_address;
register rtx value_address = 0;
#ifdef PCC_STATIC_STRUCT_RETURN
if (current_function_returns_pcc_struct)
@ -4965,11 +5054,23 @@ expand_function_start (subr, parms_have_cleanups)
/* Fetch static chain values for containing functions. */
tem = decl_function_context (current_function_decl);
/* If not doing stupid register allocation, then start off with the static
chain pointer in a pseudo register. Otherwise, we use the stack
address that was generated above. */
/* If not doing stupid register allocation copy the static chain
pointer into a psuedo. If we have small register classes, copy the
value from memory if static_chain_incoming_rtx is a REG. If we do
stupid register allocation, we use the stack address generated above. */
if (tem && ! obey_regdecls)
last_ptr = copy_to_reg (static_chain_incoming_rtx);
{
#ifdef SMALL_REGISTER_CLASSES
/* If the static chain originally came in a register, put it back
there, then move it out in the next insn. The reason for
this peculiar code is to satisfy function integration. */
if (GET_CODE (static_chain_incoming_rtx) == REG)
emit_move_insn (static_chain_incoming_rtx, last_ptr);
#endif
last_ptr = copy_to_reg (static_chain_incoming_rtx);
}
context_display = 0;
while (tem)
{
@ -5036,7 +5137,9 @@ expand_function_end (filename, line, end_bindings)
on a machine that fails to restore the registers. */
if (NON_SAVING_SETJMP && current_function_calls_setjmp)
{
setjmp_protect (DECL_INITIAL (current_function_decl));
if (DECL_INITIAL (current_function_decl) != error_mark_node)
setjmp_protect (DECL_INITIAL (current_function_decl));
setjmp_protect_args ();
}
#endif

View File

@ -14,16 +14,23 @@ extern int errno;
BSD systems) now provides getcwd as called for by POSIX. Allow for
the few exceptions to the general rule here. */
#if !(defined (POSIX) || defined (USG) || defined (VMS))
#if !(defined (POSIX) || defined (USG) || defined (VMS)) || defined (HAVE_GETWD)
#include <sys/param.h>
extern char *getwd ();
#define getcwd(buf,len) getwd(buf)
#ifdef MAXPATHLEN
#define GUESSPATHLEN (MAXPATHLEN + 1)
#else
#define GUESSPATHLEN 100
#endif
#else /* (defined (USG) || defined (VMS)) */
extern char *getcwd ();
/* We actually use this as a starting point, not a limit. */
#define GUESSPATHLEN 100
#endif /* (defined (USG) || defined (VMS)) */
#ifdef WINNT
#include <direct.h>
#endif
char *getenv ();
char *xmalloc ();

View File

@ -446,6 +446,18 @@ global_alloc (file)
if (regs_ever_live[i])
local_reg_n_refs[i] = 0;
/* Likewise for regs used in a SCRATCH. */
for (i = 0; i < scratch_list_length; i++)
if (scratch_list[i])
{
int regno = REGNO (scratch_list[i]);
int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch_list[i]));
int j;
for (j = regno; j < lim; j++)
local_reg_n_refs[j] = 0;
}
/* Allocate the space for the conflict and preference tables and
initialize them. */
@ -923,6 +935,12 @@ find_reg (allocno, losers, alt_regs_p, accept_call_clobbered, retrying)
IOR_HARD_REG_SET (used1, hard_reg_conflicts[allocno]);
#ifdef CLASS_CANNOT_CHANGE_SIZE
if (reg_changes_size[allocno_reg[allocno]])
IOR_HARD_REG_SET (used1,
reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE]);
#endif
/* Try each hard reg to see if it fits. Do this in two passes.
In the first pass, skip registers that are preferred by some other pseudo
to give it a better chance of getting one of those registers. Only if
@ -1097,27 +1115,42 @@ find_reg (allocno, losers, alt_regs_p, accept_call_clobbered, retrying)
/* Don't use a reg no good for this pseudo. */
&& ! TEST_HARD_REG_BIT (used2, regno)
&& HARD_REGNO_MODE_OK (regno, mode)
&& (((double) local_reg_n_refs[regno]
/ local_reg_live_length[regno])
< ((double) allocno_n_refs[allocno]
/ allocno_live_length[allocno])))
#ifdef CLASS_CANNOT_CHANGE_SIZE
&& ! (reg_changes_size[allocno_reg[allocno]]
&& (TEST_HARD_REG_BIT
(reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
regno)))
#endif
)
{
/* Hard reg REGNO was used less in total by local regs
than it would be used by this one allocno! */
int k;
for (k = 0; k < max_regno; k++)
if (reg_renumber[k] >= 0)
{
int r = reg_renumber[k];
int endregno
= r + HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (k));
/* We explicitly evaluate the divide results into temporary
variables so as to avoid excess precision problems that occur
on a i386-unknown-sysv4.2 (unixware) host. */
double tmp1 = ((double) local_reg_n_refs[regno]
/ local_reg_live_length[regno]);
double tmp2 = ((double) allocno_n_refs[allocno]
/ allocno_live_length[allocno]);
if (regno >= r && regno < endregno)
reg_renumber[k] = -1;
}
if (tmp1 < tmp2)
{
/* Hard reg REGNO was used less in total by local regs
than it would be used by this one allocno! */
int k;
for (k = 0; k < max_regno; k++)
if (reg_renumber[k] >= 0)
{
int r = reg_renumber[k];
int endregno
= r + HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (k));
best_reg = regno;
break;
if (regno >= r && regno < endregno)
reg_renumber[k] = -1;
}
best_reg = regno;
break;
}
}
}
}

View File

@ -659,6 +659,15 @@ gen_movsi (operand0, operand1)
if (flag_pic && SYMBOLIC_CONST (operands[1]))
emit_pic_move (operands, SImode);
/* Don't generate memory->memory moves, go through a register */
else if (TARGET_MOVE
&& (reload_in_progress | reload_completed) == 0
&& GET_CODE (operands[0]) == MEM
&& GET_CODE (operands[1]) == MEM)
{
operands[1] = force_reg (SImode, operands[1]);
}
}
operand0 = operands[0];
operand1 = operands[1];
@ -677,9 +686,32 @@ gen_movhi (operand0, operand1)
rtx operand0;
rtx operand1;
{
return gen_rtx (SET, VOIDmode,
rtx operands[2];
rtx _val = 0;
start_sequence ();
operands[0] = operand0;
operands[1] = operand1;
{
/* Don't generate memory->memory moves, go through a register */
if (TARGET_MOVE
&& (reload_in_progress | reload_completed) == 0
&& GET_CODE (operands[0]) == MEM
&& GET_CODE (operands[1]) == MEM)
{
operands[1] = force_reg (HImode, operands[1]);
}
}
operand0 = operands[0];
operand1 = operands[1];
emit_insn (gen_rtx (SET, VOIDmode,
operand0,
operand1);
operand1));
_done:
_val = gen_sequence ();
_fail:
end_sequence ();
return _val;
}
rtx
@ -687,16 +719,156 @@ gen_movstricthi (operand0, operand1)
rtx operand0;
rtx operand1;
{
return gen_rtx (SET, VOIDmode,
rtx operands[2];
rtx _val = 0;
start_sequence ();
operands[0] = operand0;
operands[1] = operand1;
{
/* Don't generate memory->memory moves, go through a register */
if (TARGET_MOVE
&& (reload_in_progress | reload_completed) == 0
&& GET_CODE (operands[0]) == MEM
&& GET_CODE (operands[1]) == MEM)
{
operands[1] = force_reg (HImode, operands[1]);
}
}
operand0 = operands[0];
operand1 = operands[1];
emit_insn (gen_rtx (SET, VOIDmode,
gen_rtx (STRICT_LOW_PART, VOIDmode,
operand0),
operand1);
operand1));
_done:
_val = gen_sequence ();
_fail:
end_sequence ();
return _val;
}
rtx
gen_movqi (operand0, operand1)
rtx operand0;
rtx operand1;
{
rtx operands[2];
rtx _val = 0;
start_sequence ();
operands[0] = operand0;
operands[1] = operand1;
{
/* Don't generate memory->memory moves, go through a register */
if (TARGET_MOVE
&& (reload_in_progress | reload_completed) == 0
&& GET_CODE (operands[0]) == MEM
&& GET_CODE (operands[1]) == MEM)
{
operands[1] = force_reg (QImode, operands[1]);
}
}
operand0 = operands[0];
operand1 = operands[1];
emit_insn (gen_rtx (SET, VOIDmode,
operand0,
operand1));
_done:
_val = gen_sequence ();
_fail:
end_sequence ();
return _val;
}
rtx
gen_movstrictqi (operand0, operand1)
rtx operand0;
rtx operand1;
{
rtx operands[2];
rtx _val = 0;
start_sequence ();
operands[0] = operand0;
operands[1] = operand1;
{
/* Don't generate memory->memory moves, go through a register */
if (TARGET_MOVE
&& (reload_in_progress | reload_completed) == 0
&& GET_CODE (operands[0]) == MEM
&& GET_CODE (operands[1]) == MEM)
{
operands[1] = force_reg (QImode, operands[1]);
}
}
operand0 = operands[0];
operand1 = operands[1];
emit_insn (gen_rtx (SET, VOIDmode,
gen_rtx (STRICT_LOW_PART, VOIDmode,
operand0),
operand1));
_done:
_val = gen_sequence ();
_fail:
end_sequence ();
return _val;
}
rtx
gen_movsf (operand0, operand1)
rtx operand0;
rtx operand1;
{
rtx operands[2];
rtx _val = 0;
start_sequence ();
operands[0] = operand0;
operands[1] = operand1;
{
/* Special case memory->memory moves and pushes */
if (TARGET_MOVE
&& (reload_in_progress | reload_completed) == 0
&& GET_CODE (operands[0]) == MEM
&& (GET_CODE (operands[1]) == MEM || push_operand (operands[0], SFmode)))
{
rtx (*genfunc) PROTO((rtx, rtx)) = (push_operand (operands[0], SFmode))
? gen_movsf_push
: gen_movsf_mem;
emit_insn ((*genfunc) (operands[0], operands[1]));
DONE;
}
/* If we are loading a floating point constant that isn't 0 or 1 into a register,
indicate we need the pic register loaded. This could be optimized into stores
of constants if the target eventually moves to memory, but better safe than
sorry. */
if (flag_pic
&& GET_CODE (operands[0]) != MEM
&& GET_CODE (operands[1]) == CONST_DOUBLE
&& !standard_80387_constant_p (operands[1]))
{
current_function_uses_pic_offset_table = 1;
}
}
operand0 = operands[0];
operand1 = operands[1];
emit_insn (gen_rtx (SET, VOIDmode,
operand0,
operand1));
_done:
_val = gen_sequence ();
_fail:
end_sequence ();
return _val;
}
/* Generate RTL for the `movsf_push_nomove' pattern: push an SFmode
   value with a bare (set op0 op1) — the variant used when TARGET_MOVE
   is off and no scratch register is required.  A diff-residue hunk
   header had replaced the closing `operand1);' line; reconstructed to
   match the identical gen_movdf_push_nomove / gen_movxf_push_nomove
   siblings.  */
rtx
gen_movsf_push_nomove (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  return gen_rtx (SET, VOIDmode,
		  operand0,
		  operand1);
}
/* Generate RTL for the `movsf_push' pattern: push an SFmode value,
   clobbering one SImode scratch register.  A stray old-diff line
   (`gen_movstrictqi (operand0, operand1)') had been interleaved into
   the function header; removed.  */
rtx
gen_movsf_push (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2,
		gen_rtx (SET, VOIDmode,
			operand0,
			operand1),
		gen_rtx (CLOBBER, VOIDmode,
			gen_rtx (SCRATCH, SImode, 0))));
}
/* Generate RTL for the `movsf_mem' pattern: a memory->memory SFmode
   move that needs one SImode scratch register to stage the value.  */
rtx
gen_movsf_mem (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  rtx move, scratch;

  /* The move itself, plus the clobber that reserves the staging
     register for the matching define_insn.  */
  move = gen_rtx (SET, VOIDmode, operand0, operand1);
  scratch = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2, move, scratch));
}
/* Generate RTL for the `movsf_normal' pattern: a plain SFmode move
   with no scratch register.  Old-diff lines from the deleted
   gen_movstrictqi body (a STRICT_LOW_PART wrapper and a duplicated
   `operand0,') had been interleaved into the return expression,
   making it malformed; restored to the plain (set op0 op1) form used
   by every other *_normal mov pattern here.  */
rtx
gen_movsf_normal (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  return gen_rtx (SET, VOIDmode,
		  operand0,
		  operand1);
}
/* Generate RTL for the `swapsf' pattern: exchange the contents of two
   SFmode operands, expressed as a PARALLEL of the two opposing SETs.
   A stray old-diff line (`gen_movsf (operand0, operand1)') had been
   interleaved into the function header; removed.  */
rtx
gen_swapsf (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2,
		gen_rtx (SET, VOIDmode,
			operand0,
			operand1),
		gen_rtx (SET, VOIDmode,
			operand1,
			operand0)));
}
/* Generated expander for the `movdf' pattern: move OPERAND1 (DFmode)
   into OPERAND0.  Mirrors gen_movsf: memory->memory moves and pushes
   are routed to the scratch-carrying gen_movdf_push / gen_movdf_mem
   patterns; everything else becomes a plain SET.  Returns the emitted
   insn sequence.  */
rtx
gen_movdf (operand0, operand1)
rtx operand0;
rtx operand1;
{
rtx operands[2];
rtx _val = 0;
start_sequence ();
operands[0] = operand0;
operands[1] = operand1;
{
/* Special case memory->memory moves and pushes */
if (TARGET_MOVE
&& (reload_in_progress | reload_completed) == 0
&& GET_CODE (operands[0]) == MEM
&& (GET_CODE (operands[1]) == MEM || push_operand (operands[0], DFmode)))
{
/* Pushes and other mem->mem moves need the patterns that carry
   scratch registers.  */
rtx (*genfunc) PROTO((rtx, rtx)) = (push_operand (operands[0], DFmode))
? gen_movdf_push
: gen_movdf_mem;
emit_insn ((*genfunc) (operands[0], operands[1]));
DONE;
}
/* If we are loading a floating point constant that isn't 0 or 1 into a register,
indicate we need the pic register loaded. This could be optimized into stores
of constants if the target eventually moves to memory, but better safe than
sorry. */
if (flag_pic
&& GET_CODE (operands[0]) != MEM
&& GET_CODE (operands[1]) == CONST_DOUBLE
&& !standard_80387_constant_p (operands[1]))
{
current_function_uses_pic_offset_table = 1;
}
}
operand0 = operands[0];
operand1 = operands[1];
emit_insn (gen_rtx (SET, VOIDmode,
operand0,
operand1));
/* DONE in the fragment above jumps here.  */
_done:
_val = gen_sequence ();
_fail:
end_sequence ();
return _val;
}
/* Generate RTL for the `movdf_push_nomove' pattern: push a DFmode
   value as a bare (set op0 op1), the variant that needs no scratch
   registers.  */
rtx
gen_movdf_push_nomove (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  rtx pat;

  pat = gen_rtx (SET, VOIDmode, operand0, operand1);
  return pat;
}
/* Generate RTL for the `movdf_push' pattern: push a DFmode value,
   clobbering two SImode scratch registers (one per word of the
   double).  */
rtx
gen_movdf_push (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  rtx move, clobber1, clobber2;

  move = gen_rtx (SET, VOIDmode, operand0, operand1);
  clobber1 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  clobber2 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  return gen_rtx (PARALLEL, VOIDmode,
		  gen_rtvec (3, move, clobber1, clobber2));
}
/* Generate RTL for the `movdf_mem' pattern: a memory->memory DFmode
   move staged through two SImode scratch registers.  */
rtx
gen_movdf_mem (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  rtx move, clobber1, clobber2;

  move = gen_rtx (SET, VOIDmode, operand0, operand1);
  clobber1 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  clobber2 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  return gen_rtx (PARALLEL, VOIDmode,
		  gen_rtvec (3, move, clobber1, clobber2));
}
rtx
gen_movdf_normal (operand0, operand1)
rtx operand0;
rtx operand1;
{
@ -739,7 +1040,97 @@ gen_swapdf (operand0, operand1)
}
/* Generated expander for the `movxf' pattern: move OPERAND1 (XFmode,
   80-bit extended float) into OPERAND0.  Mirrors gen_movsf/gen_movdf:
   memory->memory moves and pushes go through the scratch-carrying
   gen_movxf_push / gen_movxf_mem patterns; everything else becomes a
   plain SET.  A stray old-diff line (`gen_movdf (operand0, operand1)')
   had been interleaved into the function header; removed.  */
rtx
gen_movxf (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  rtx operands[2];
  rtx _val = 0;
  start_sequence ();
  operands[0] = operand0;
  operands[1] = operand1;
  {
    /* Special case memory->memory moves and pushes */
    if (TARGET_MOVE
	&& (reload_in_progress | reload_completed) == 0
	&& GET_CODE (operands[0]) == MEM
	&& (GET_CODE (operands[1]) == MEM || push_operand (operands[0], XFmode)))
      {
	rtx (*genfunc) PROTO((rtx, rtx)) = (push_operand (operands[0], XFmode))
	  ? gen_movxf_push
	  : gen_movxf_mem;
	emit_insn ((*genfunc) (operands[0], operands[1]));
	DONE;
      }
    /* If we are loading a floating point constant that isn't 0 or 1 into a register,
       indicate we need the pic register loaded.  This could be optimized into stores
       of constants if the target eventually moves to memory, but better safe than
       sorry.  */
    if (flag_pic
	&& GET_CODE (operands[0]) != MEM
	&& GET_CODE (operands[1]) == CONST_DOUBLE
	&& !standard_80387_constant_p (operands[1]))
      {
	current_function_uses_pic_offset_table = 1;
      }
  }
  operand0 = operands[0];
  operand1 = operands[1];
  emit_insn (gen_rtx (SET, VOIDmode,
		      operand0,
		      operand1));
 /* DONE in the fragment above jumps here.  */
 _done:
  _val = gen_sequence ();
 _fail:
  end_sequence ();
  return _val;
}
/* Generate RTL for the `movxf_push_nomove' pattern: push an XFmode
   value as a bare (set op0 op1), the variant that needs no scratch
   registers.  */
rtx
gen_movxf_push_nomove (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  rtx pat;

  pat = gen_rtx (SET, VOIDmode, operand0, operand1);
  return pat;
}
/* Generate RTL for the `movxf_push' pattern: push an XFmode value,
   clobbering two SImode scratch registers.  */
rtx
gen_movxf_push (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  rtx move, clobber1, clobber2;

  move = gen_rtx (SET, VOIDmode, operand0, operand1);
  clobber1 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  clobber2 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  return gen_rtx (PARALLEL, VOIDmode,
		  gen_rtvec (3, move, clobber1, clobber2));
}
/* Generate RTL for the `movxf_mem' pattern: a memory->memory XFmode
   move staged through two SImode scratch registers.  */
rtx
gen_movxf_mem (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  rtx move, clobber1, clobber2;

  move = gen_rtx (SET, VOIDmode, operand0, operand1);
  clobber1 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  clobber2 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  return gen_rtx (PARALLEL, VOIDmode,
		  gen_rtvec (3, move, clobber1, clobber2));
}
rtx
gen_movxf_normal (operand0, operand1)
rtx operand0;
rtx operand1;
{
@ -762,24 +1153,19 @@ gen_swapxf (operand0, operand1)
operand0)));
}
rtx
gen_movxf (operand0, operand1)
rtx operand0;
rtx operand1;
{
return gen_rtx (SET, VOIDmode,
operand0,
operand1);
}
rtx
gen_movdi (operand0, operand1)
rtx operand0;
rtx operand1;
{
return gen_rtx (SET, VOIDmode,
return gen_rtx (PARALLEL, VOIDmode, gen_rtvec (3,
gen_rtx (SET, VOIDmode,
operand0,
operand1);
operand1),
gen_rtx (CLOBBER, VOIDmode,
gen_rtx (SCRATCH, SImode, 0)),
gen_rtx (CLOBBER, VOIDmode,
gen_rtx (SCRATCH, SImode, 0))));
}
rtx
@ -1477,11 +1863,14 @@ gen_adddi3 (operand0, operand1, operand2)
rtx operand1;
rtx operand2;
{
return gen_rtx (SET, VOIDmode,
return gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2,
gen_rtx (SET, VOIDmode,
operand0,
gen_rtx (PLUS, DImode,
operand1,
operand2));
operand2)),
gen_rtx (CLOBBER, VOIDmode,
gen_rtx (SCRATCH, SImode, 0))));
}
rtx
@ -1523,6 +1912,16 @@ gen_addqi3 (operand0, operand1, operand2)
operand2));
}
/* Generate RTL for the `movsi_lea' pattern: a bare (set op0 op1);
   the matching define_insn presumably emits an lea — TODO confirm
   against the i386 md file.  */
rtx
gen_movsi_lea (operand0, operand1)
     rtx operand0;
     rtx operand1;
{
  rtx pat;

  pat = gen_rtx (SET, VOIDmode, operand0, operand1);
  return pat;
}
rtx
gen_addxf3 (operand0, operand1, operand2)
rtx operand0;
@ -1568,11 +1967,14 @@ gen_subdi3 (operand0, operand1, operand2)
rtx operand1;
rtx operand2;
{
return gen_rtx (SET, VOIDmode,
return gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2,
gen_rtx (SET, VOIDmode,
operand0,
gen_rtx (MINUS, DImode,
operand1,
operand2));
operand2)),
gen_rtx (CLOBBER, VOIDmode,
gen_rtx (SCRATCH, SImode, 0))));
}
rtx
@ -1739,6 +2141,48 @@ gen_mulsidi3 (operand0, operand1, operand2)
operand2)));
}
/* Generate RTL for the `umulsi3_highpart' pattern: OPERAND0 gets the
   high 32 bits of the unsigned 64-bit product of OPERAND1 and
   OPERAND2, with one SImode scratch clobbered (the discarded low
   word).  */
rtx
gen_umulsi3_highpart (operand0, operand1, operand2)
     rtx operand0;
     rtx operand1;
     rtx operand2;
{
  rtx wide1, wide2, product, highpart, move, clobber;

  /* Widen both inputs to DImode without sign.  */
  wide1 = gen_rtx (ZERO_EXTEND, DImode, operand1);
  wide2 = gen_rtx (ZERO_EXTEND, DImode, operand2);
  /* Full 64-bit product, shifted right 32 and truncated back to
     SImode to expose the high word.  */
  product = gen_rtx (MULT, DImode, wide1, wide2);
  highpart = gen_rtx (TRUNCATE, SImode,
		      gen_rtx (LSHIFTRT, DImode, product, GEN_INT (32)));
  move = gen_rtx (SET, VOIDmode, operand0, highpart);
  clobber = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2, move, clobber));
}
/* Generate RTL for the `smulsi3_highpart' pattern: OPERAND0 gets the
   high 32 bits of the signed 64-bit product of OPERAND1 and OPERAND2,
   with one SImode scratch clobbered (the discarded low word).  */
rtx
gen_smulsi3_highpart (operand0, operand1, operand2)
     rtx operand0;
     rtx operand1;
     rtx operand2;
{
  rtx wide1, wide2, product, highpart, move, clobber;

  /* Widen both inputs to DImode with sign extension.  */
  wide1 = gen_rtx (SIGN_EXTEND, DImode, operand1);
  wide2 = gen_rtx (SIGN_EXTEND, DImode, operand2);
  /* Full 64-bit product, shifted right 32 and truncated back to
     SImode to expose the high word.  */
  product = gen_rtx (MULT, DImode, wide1, wide2);
  highpart = gen_rtx (TRUNCATE, SImode,
		      gen_rtx (LSHIFTRT, DImode, product, GEN_INT (32)));
  move = gen_rtx (SET, VOIDmode, operand0, highpart);
  clobber = gen_rtx (CLOBBER, VOIDmode, gen_rtx (SCRATCH, SImode, 0));
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2, move, clobber));
}
rtx
gen_mulxf3 (operand0, operand1, operand2)
rtx operand0;
@ -3554,21 +3998,23 @@ gen_untyped_call (operand0, operand1, operand2)
operands[2] = operand2;
{
rtx addr;
int i;
if (flag_pic)
current_function_uses_pic_offset_table = 1;
emit_call_insn (gen_call (operands[0], const0_rtx, NULL, const0_rtx));
/* With half-pic, force the address into a register. */
addr = XEXP (operands[0], 0);
if (GET_CODE (addr) != REG && HALF_PIC_P () && !CONSTANT_ADDRESS_P (addr))
XEXP (operands[0], 0) = force_reg (Pmode, addr);
for (i = 0; i < XVECLEN (operands[2], 0); i++)
{
rtx set = XVECEXP (operands[2], 0, i);
emit_move_insn (SET_DEST (set), SET_SRC (set));
}
operands[1] = change_address (operands[1], DImode, XEXP (operands[1], 0));
if (! expander_call_insn_operand (operands[1], QImode))
operands[1]
= change_address (operands[1], VOIDmode,
copy_to_mode_reg (Pmode, XEXP (operands[1], 0)));
/* The optimizer does not know that the call sets the function value
registers we stored in the result block. We avoid problems by
claiming that all hard registers are used and clobbered at this
point. */
emit_insn (gen_blockage ());
DONE;
}
operand0 = operands[0];
operand1 = operands[1];
@ -3588,59 +4034,11 @@ gen_untyped_call (operand0, operand1, operand2)
}
rtx
gen_untyped_return (operand0, operand1)
rtx operand0;
rtx operand1;
gen_blockage ()
{
rtx operands[2];
rtx _val = 0;
start_sequence ();
operands[0] = operand0;
operands[1] = operand1;
{
rtx valreg1 = gen_rtx (REG, SImode, 0);
rtx valreg2 = gen_rtx (REG, SImode, 1);
rtx result = operands[0];
/* Restore the FPU state. */
emit_insn (gen_update_return (change_address (result, SImode,
plus_constant (XEXP (result, 0),
8))));
/* Reload the function value registers. */
emit_move_insn (valreg1, change_address (result, SImode, XEXP (result, 0)));
emit_move_insn (valreg2,
change_address (result, SImode,
plus_constant (XEXP (result, 0), 4)));
/* Put USE insns before the return. */
emit_insn (gen_rtx (USE, VOIDmode, valreg1));
emit_insn (gen_rtx (USE, VOIDmode, valreg2));
/* Construct the return. */
expand_null_return ();
DONE;
}
operand0 = operands[0];
operand1 = operands[1];
emit (operand0);
emit (operand1);
_done:
_val = gen_sequence ();
_fail:
end_sequence ();
return _val;
}
rtx
gen_update_return (operand0)
rtx operand0;
{
return gen_rtx (UNSPEC, SImode,
return gen_rtx (UNSPEC_VOLATILE, VOIDmode,
gen_rtvec (1,
operand0),
const0_rtx),
0);
}
@ -3916,25 +4314,43 @@ add_clobbers (pattern, insn_code_number)
switch (insn_code_number)
{
case 264:
XVECEXP (pattern, 0, 1) = gen_rtx (CLOBBER, VOIDmode,
gen_rtx (SCRATCH, SImode, 0));
break;
case 95:
case 94:
case 93:
case 114:
case 113:
case 112:
XVECEXP (pattern, 0, 3) = gen_rtx (CLOBBER, VOIDmode,
gen_rtx (SCRATCH, SImode, 0));
break;
case 89:
case 88:
case 87:
case 108:
case 107:
case 106:
XVECEXP (pattern, 0, 4) = gen_rtx (CLOBBER, VOIDmode,
gen_rtx (SCRATCH, SImode, 0));
break;
case 84:
case 83:
case 80:
case 79:
case 74:
case 73:
XVECEXP (pattern, 0, 1) = gen_rtx (CLOBBER, VOIDmode,
gen_rtx (SCRATCH, SImode, 0));
XVECEXP (pattern, 0, 2) = gen_rtx (CLOBBER, VOIDmode,
gen_rtx (SCRATCH, SImode, 0));
break;
case 285:
case 151:
case 150:
case 135:
case 127:
case 68:
case 67:
XVECEXP (pattern, 0, 1) = gen_rtx (CLOBBER, VOIDmode,
gen_rtx (SCRATCH, SImode, 0));
break;
case 33:
case 32:
case 31:

View File

@ -9,12 +9,6 @@ extern rtx recog_operand[];
extern rtx *recog_operand_loc[];
extern rtx *recog_dup_loc[];
extern char recog_dup_num[];
extern
#ifdef __GNUC__
__volatile__
#endif
void fatal_insn_not_found ();
void
insn_extract (insn)
rtx insn;
@ -27,7 +21,7 @@ insn_extract (insn)
case -1:
fatal_insn_not_found (insn);
case 308:
case 326:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XVECEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0, 0), 0));
ro[2] = *(ro_loc[2] = &XVECEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0, 1));
@ -36,38 +30,38 @@ insn_extract (insn)
recog_dup_num[0] = 1;
break;
case 306:
case 303:
case 302:
case 300:
case 299:
case 324:
case 321:
case 320:
case 318:
case 317:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (pat, 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (XEXP (pat, 1), 1), 0));
ro[3] = *(ro_loc[3] = &XEXP (pat, 1));
break;
case 305:
case 301:
case 298:
case 297:
case 295:
case 323:
case 319:
case 316:
case 315:
case 313:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (pat, 1), 0), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (pat, 1), 1));
ro[3] = *(ro_loc[3] = &XEXP (pat, 1));
break;
case 304:
case 296:
case 294:
case 322:
case 314:
case 312:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (pat, 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (pat, 1), 1));
ro[3] = *(ro_loc[3] = &XEXP (pat, 1));
break;
case 289:
case 307:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XVECEXP (pat, 0, 1), 0));
@ -80,7 +74,7 @@ insn_extract (insn)
recog_dup_num[2] = 0;
break;
case 288:
case 306:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 1), 0));
@ -94,7 +88,7 @@ insn_extract (insn)
recog_dup_num[2] = 1;
break;
case 286:
case 304:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XVECEXP (pat, 0, 1), 0));
@ -106,33 +100,18 @@ insn_extract (insn)
recog_dup_num[1] = 0;
break;
case 284:
case 283:
case 302:
case 301:
case 300:
break;
case 282:
ro[0] = *(ro_loc[0] = &XVECEXP (pat, 0, 0));
break;
case 280:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 0), 0));
ro[1] = *(ro_loc[1] = &XVECEXP (pat, 0, 1));
ro[2] = *(ro_loc[2] = &XVECEXP (pat, 0, 2));
break;
case 279:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XVECEXP (pat, 0, 1));
ro[2] = *(ro_loc[2] = &XVECEXP (pat, 0, 2));
break;
case 277:
case 298:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (pat, 1), 0), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (pat, 1), 1));
break;
case 274:
case 295:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 1));
@ -141,7 +120,7 @@ insn_extract (insn)
ro[4] = *(ro_loc[4] = &XEXP (XEXP (XVECEXP (pat, 0, 1), 1), 1));
break;
case 273:
case 294:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 1));
@ -150,7 +129,7 @@ insn_extract (insn)
ro[4] = *(ro_loc[4] = &XEXP (XEXP (XVECEXP (pat, 0, 1), 1), 1));
break;
case 268:
case 289:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XVECEXP (pat, 0, 0), 1));
ro[2] = const0_rtx;
@ -158,7 +137,7 @@ insn_extract (insn)
ro[3] = *(ro_loc[3] = &XEXP (XEXP (XVECEXP (pat, 0, 1), 1), 1));
break;
case 267:
case 288:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XVECEXP (pat, 0, 0), 1));
ro[2] = const0_rtx;
@ -166,85 +145,85 @@ insn_extract (insn)
ro[3] = *(ro_loc[3] = &XEXP (XEXP (XVECEXP (pat, 0, 1), 1), 1));
break;
case 265:
case 286:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 1));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0));
break;
case 264:
case 285:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 1), 0), 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 1), 0), 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XVECEXP (pat, 0, 1), 0));
break;
case 261:
case 282:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (pat, 1), 0));
break;
case 260:
case 259:
case 258:
case 257:
case 256:
case 255:
case 254:
case 253:
case 252:
case 251:
case 281:
case 280:
case 279:
case 278:
case 277:
case 276:
case 275:
case 274:
case 273:
case 272:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (XEXP (pat, 1), 2), 0));
break;
case 250:
case 248:
case 246:
case 244:
case 242:
case 240:
case 238:
case 236:
case 234:
case 232:
case 271:
case 269:
case 267:
case 265:
case 263:
case 261:
case 259:
case 257:
case 255:
case 253:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (XEXP (pat, 1), 1), 0));
break;
case 230:
case 228:
case 226:
case 224:
case 222:
case 220:
case 218:
case 216:
case 214:
case 212:
case 251:
case 249:
case 247:
case 245:
case 243:
case 241:
case 239:
case 237:
case 235:
case 233:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
break;
case 210:
case 209:
case 231:
case 230:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (pat, 1), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (pat, 1), 1));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (pat, 1), 2));
break;
case 208:
case 229:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (pat, 1), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (pat, 1), 2));
break;
case 207:
case 228:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (pat, 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (XEXP (pat, 1), 1), 1));
break;
case 206:
case 227:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (pat, 1), 0), 1));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (pat, 1), 1));
break;
case 205:
case 226:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (pat, 0), 0));
ro[1] = const0_rtx;
ro_loc[1] = &junk;
@ -252,9 +231,9 @@ insn_extract (insn)
ro[3] = *(ro_loc[3] = &XEXP (pat, 1));
break;
case 195:
case 189:
case 183:
case 216:
case 210:
case 204:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 1));
@ -262,37 +241,37 @@ insn_extract (insn)
recog_dup_num[0] = 2;
break;
case 177:
case 174:
case 198:
case 195:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XVECEXP (XEXP (pat, 1), 0, 0), 0));
break;
case 176:
case 175:
case 173:
case 172:
case 197:
case 196:
case 194:
case 193:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XVECEXP (XEXP (pat, 1), 0, 0));
break;
case 293:
case 291:
case 171:
case 170:
case 168:
case 165:
case 163:
case 160:
case 158:
case 311:
case 309:
case 192:
case 191:
case 189:
case 186:
case 184:
case 181:
case 179:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (pat, 1), 0), 0));
break;
case 142:
case 141:
case 140:
case 139:
case 163:
case 162:
case 161:
case 160:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 1));
@ -303,65 +282,79 @@ insn_extract (insn)
recog_dup_num[1] = 2;
break;
case 130:
case 129:
case 128:
case 127:
case 151:
case 150:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0), 0), 0), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0), 0), 1), 0));
ro[3] = *(ro_loc[3] = &XEXP (XVECEXP (pat, 0, 1), 0));
break;
case 149:
case 148:
case 147:
case 146:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (pat, 1), 0), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (XEXP (pat, 1), 1), 0));
break;
case 276:
case 204:
case 297:
case 225:
case 224:
case 223:
case 222:
case 221:
case 220:
case 219:
case 218:
case 217:
case 215:
case 213:
case 212:
case 211:
case 209:
case 207:
case 206:
case 205:
case 203:
case 202:
case 201:
case 200:
case 199:
case 198:
case 197:
case 196:
case 194:
case 192:
case 191:
case 190:
case 188:
case 186:
case 185:
case 184:
case 182:
case 151:
case 150:
case 149:
case 148:
case 147:
case 146:
case 172:
case 171:
case 170:
case 169:
case 168:
case 167:
case 166:
case 165:
case 164:
case 156:
case 155:
case 145:
case 144:
case 143:
case 135:
case 134:
case 126:
case 125:
case 124:
case 123:
case 119:
case 118:
case 117:
case 116:
case 111:
case 110:
case 109:
case 108:
case 142:
case 138:
case 137:
case 136:
case 130:
case 129:
case 128:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (pat, 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (pat, 1), 1));
break;
case 95:
case 94:
case 93:
case 135:
case 127:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 1));
ro[3] = *(ro_loc[3] = &XEXP (XVECEXP (pat, 0, 1), 0));
break;
case 114:
case 113:
case 112:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0), 0));
ro[2] = *(ro_loc[2] = &XEXP (XVECEXP (pat, 0, 1), 0));
@ -369,9 +362,9 @@ insn_extract (insn)
ro[4] = *(ro_loc[4] = &XEXP (XVECEXP (pat, 0, 3), 0));
break;
case 89:
case 88:
case 87:
case 108:
case 107:
case 106:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0), 0));
ro[2] = *(ro_loc[2] = &XEXP (XVECEXP (pat, 0, 2), 0));
@ -381,53 +374,66 @@ insn_extract (insn)
recog_dup_num[0] = 1;
break;
case 78:
case 97:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0));
ro[2] = *(ro_loc[2] = &XEXP (XVECEXP (pat, 0, 1), 0));
break;
case 201:
case 200:
case 199:
case 190:
case 188:
case 187:
case 185:
case 183:
case 182:
case 180:
case 179:
case 178:
case 169:
case 167:
case 166:
case 164:
case 162:
case 161:
case 159:
case 157:
case 156:
case 155:
case 154:
case 153:
case 152:
case 107:
case 106:
case 105:
case 104:
case 103:
case 102:
case 80:
case 79:
case 76:
case 75:
case 74:
case 73:
case 72:
case 71:
case 70:
case 69:
case 68:
case 67:
case 66:
case 177:
case 176:
case 175:
case 174:
case 173:
case 126:
case 125:
case 124:
case 123:
case 122:
case 121:
case 99:
case 98:
case 95:
case 94:
case 93:
case 92:
case 91:
case 90:
case 89:
case 88:
case 87:
case 86:
case 85:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
ro[1] = *(ro_loc[1] = &XEXP (XEXP (pat, 1), 0));
break;
case 62:
case 59:
case 84:
case 83:
case 80:
case 79:
case 74:
case 73:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XVECEXP (pat, 0, 0), 1));
ro[2] = *(ro_loc[2] = &XEXP (XVECEXP (pat, 0, 1), 0));
ro[3] = *(ro_loc[3] = &XEXP (XVECEXP (pat, 0, 2), 0));
break;
case 82:
case 76:
case 70:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XVECEXP (pat, 0, 0), 1));
recog_dup_loc[0] = &XEXP (XVECEXP (pat, 0, 1), 0);
@ -436,28 +442,38 @@ insn_extract (insn)
recog_dup_num[1] = 0;
break;
case 271:
case 55:
case 52:
case 68:
case 67:
ro[0] = *(ro_loc[0] = &XEXP (XVECEXP (pat, 0, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (XVECEXP (pat, 0, 0), 1));
ro[2] = *(ro_loc[2] = &XEXP (XVECEXP (pat, 0, 1), 0));
break;
case 292:
case 64:
case 57:
ro[0] = *(ro_loc[0] = &XEXP (XEXP (pat, 0), 0));
ro[1] = *(ro_loc[1] = &XEXP (pat, 1));
break;
case 270:
case 112:
case 65:
case 64:
case 63:
case 61:
case 291:
case 131:
case 81:
case 78:
case 75:
case 72:
case 69:
case 66:
case 62:
case 60:
case 59:
case 58:
case 57:
case 56:
case 54:
case 55:
case 53:
case 52:
case 51:
case 50:
case 49:
case 48:
case 47:
case 46:
ro[0] = *(ro_loc[0] = &XEXP (pat, 0));
@ -520,7 +536,7 @@ insn_extract (insn)
ro[1] = *(ro_loc[1] = &XEXP (XVECEXP (pat, 0, 1), 0));
break;
case 262:
case 283:
case 4:
case 2:
case 0:

View File

@ -109,8 +109,14 @@ init_all_optabs ()
smul_optab->handlers[(int) SImode].insn_code = CODE_FOR_mulsi3;
umul_widen_optab->handlers[(int) HImode].insn_code = CODE_FOR_umulqihi3;
smul_widen_optab->handlers[(int) HImode].insn_code = CODE_FOR_mulqihi3;
umul_widen_optab->handlers[(int) DImode].insn_code = CODE_FOR_umulsidi3;
smul_widen_optab->handlers[(int) DImode].insn_code = CODE_FOR_mulsidi3;
if (HAVE_umulsidi3)
umul_widen_optab->handlers[(int) DImode].insn_code = CODE_FOR_umulsidi3;
if (HAVE_mulsidi3)
smul_widen_optab->handlers[(int) DImode].insn_code = CODE_FOR_mulsidi3;
if (HAVE_umulsi3_highpart)
umul_highpart_optab->handlers[(int) SImode].insn_code = CODE_FOR_umulsi3_highpart;
if (HAVE_smulsi3_highpart)
smul_highpart_optab->handlers[(int) SImode].insn_code = CODE_FOR_smulsi3_highpart;
if (HAVE_mulxf3)
smul_optab->handlers[(int) XFmode].insn_code = CODE_FOR_mulxf3;
if (HAVE_muldf3)

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -134,10 +134,15 @@ function_cannot_inline_p (fndecl)
if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
return "function with varying-size return value cannot be inline";
/* Cannot inline a function with a varying size argument. */
/* Cannot inline a function with a varying size argument or one that
receives a transparent union. */
for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
return "function with varying-size parameter cannot be inline";
{
if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
return "function with varying-size parameter cannot be inline";
else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
return "function with transparent unit parameter cannot be inline";
}
if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
{
@ -1235,6 +1240,7 @@ expand_inline_function (fndecl, parms, target, ignore, type, structure_value_add
tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
/* Mode of the variable used within the function. */
enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
int invisiref = 0;
/* Make sure this formal has some correspondence in the users code
* before emitting any line notes for it. */
@ -1263,6 +1269,7 @@ expand_inline_function (fndecl, parms, target, ignore, type, structure_value_add
store_expr (arg, stack_slot, 0);
arg_vals[i] = XEXP (stack_slot, 0);
invisiref = 1;
}
else if (GET_CODE (loc) != MEM)
{
@ -1288,8 +1295,11 @@ expand_inline_function (fndecl, parms, target, ignore, type, structure_value_add
be two different pseudos, and `safe_from_p' will make all
sorts of smart assumptions about their not conflicting.
But if ARG_VALS[I] overlaps TARGET, these assumptions are
wrong, so put ARG_VALS[I] into a fresh register. */
wrong, so put ARG_VALS[I] into a fresh register.
Don't worry about invisible references, since their stack
temps will never overlap the target. */
|| (target != 0
&& ! invisiref
&& (GET_CODE (arg_vals[i]) == REG
|| GET_CODE (arg_vals[i]) == SUBREG
|| GET_CODE (arg_vals[i]) == MEM)
@ -1640,7 +1650,7 @@ expand_inline_function (fndecl, parms, target, ignore, type, structure_value_add
for (insn = insns; insn; insn = NEXT_INSN (insn))
{
rtx copy, pattern;
rtx copy, pattern, set;
map->orig_asm_operands_vector = 0;
@ -1648,6 +1658,7 @@ expand_inline_function (fndecl, parms, target, ignore, type, structure_value_add
{
case INSN:
pattern = PATTERN (insn);
set = single_set (insn);
copy = 0;
if (GET_CODE (pattern) == USE
&& GET_CODE (XEXP (pattern, 0)) == REG
@ -1659,33 +1670,47 @@ expand_inline_function (fndecl, parms, target, ignore, type, structure_value_add
/* Ignore setting a function value that we don't want to use. */
if (map->inline_target == 0
&& GET_CODE (pattern) == SET
&& GET_CODE (SET_DEST (pattern)) == REG
&& REG_FUNCTION_VALUE_P (SET_DEST (pattern)))
&& set != 0
&& GET_CODE (SET_DEST (set)) == REG
&& REG_FUNCTION_VALUE_P (SET_DEST (set)))
{
if (volatile_refs_p (SET_SRC (pattern)))
if (volatile_refs_p (SET_SRC (set)))
{
rtx new_set;
/* If we must not delete the source,
load it into a new temporary. */
copy = emit_insn (copy_rtx_and_substitute (pattern, map));
SET_DEST (PATTERN (copy))
= gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (copy))));
new_set = single_set (copy);
if (new_set == 0)
abort ();
SET_DEST (new_set)
= gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
}
else
break;
}
/* If this is setting the static chain rtx, omit it. */
else if (static_chain_value != 0
&& set != 0
&& GET_CODE (SET_DEST (set)) == REG
&& rtx_equal_p (SET_DEST (set),
static_chain_incoming_rtx))
break;
/* If this is setting the static chain pseudo, set it from
the value we want to give it instead. */
else if (static_chain_value != 0
&& GET_CODE (pattern) == SET
&& rtx_equal_p (SET_SRC (pattern),
&& set != 0
&& rtx_equal_p (SET_SRC (set),
static_chain_incoming_rtx))
{
rtx newdest = copy_rtx_and_substitute (SET_DEST (pattern), map);
copy = emit_insn (gen_rtx (SET, VOIDmode, newdest,
static_chain_value));
rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
copy = emit_move_insn (newdest, static_chain_value);
static_chain_value = 0;
}
else
@ -2585,6 +2610,9 @@ subst_constants (loc, insn, map)
/* If storing a recognizable value save it for later recording. */
if ((map->num_sets < MAX_RECOG_OPERANDS)
&& (CONSTANT_P (src)
|| (GET_CODE (src) == REG
&& REGNO (src) >= FIRST_VIRTUAL_REGISTER
&& REGNO (src) <= LAST_VIRTUAL_REGISTER)
|| (GET_CODE (src) == PLUS
&& GET_CODE (XEXP (src, 0)) == REG
&& REGNO (XEXP (src, 0)) >= FIRST_VIRTUAL_REGISTER

View File

@ -661,6 +661,7 @@ jump_optimize (f, cross_jump, noop_moves, after_regscan)
/* Detect jump to following insn. */
if (reallabelprev == insn && condjump_p (insn))
{
next = next_real_insn (JUMP_LABEL (insn));
delete_jump (insn);
changed = 1;
continue;
@ -2202,11 +2203,11 @@ duplicate_loop_exit_test (loop_start)
emit_barrier_before (loop_start);
}
delete_insn (next_nonnote_insn (loop_start));
/* Mark the exit code as the virtual top of the converted loop. */
emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
delete_insn (next_nonnote_insn (loop_start));
return 1;
}

View File

@ -165,6 +165,11 @@ static enum reg_class *qty_alternate_class;
static rtx *qty_scratch_rtx;
/* Element Q is nonzero if this quantity has been used in a SUBREG
that changes its size. */
static char *qty_changes_size;
/* Element Q is the register number of one pseudo register whose
reg_qty value is Q, or -1 is this quantity is for a SCRATCH. This
register should be the head of the chain maintained in reg_next_in_qty. */
@ -289,6 +294,7 @@ alloc_qty (regno, mode, size, birth)
qty_min_class[qty] = reg_preferred_class (regno);
qty_alternate_class[qty] = reg_alternate_class (regno);
qty_n_refs[qty] = reg_n_refs[regno];
qty_changes_size[qty] = reg_changes_size[regno];
}
/* Similar to `alloc_qty', but allocates a quantity for a SCRATCH rtx
@ -378,6 +384,7 @@ alloc_qty_for_scratch (scratch, n, insn, insn_code_num, insn_number)
qty_min_class[qty] = class;
qty_alternate_class[qty] = NO_REGS;
qty_n_refs[qty] = 1;
qty_changes_size[qty] = 0;
}
/* Main entry point of this file. */
@ -439,6 +446,7 @@ local_alloc ()
qty_alternate_class
= (enum reg_class *) alloca (max_qty * sizeof (enum reg_class));
qty_n_refs = (int *) alloca (max_qty * sizeof (int));
qty_changes_size = (char *) alloca (max_qty * sizeof (char));
reg_qty = (int *) alloca (max_regno * sizeof (int));
reg_offset = (char *) alloca (max_regno * sizeof (char));
@ -1925,6 +1933,9 @@ update_qty_class (qty, reg)
rclass = reg_alternate_class (reg);
if (reg_class_subset_p (rclass, qty_alternate_class[qty]))
qty_alternate_class[qty] = rclass;
if (reg_changes_size[reg])
qty_changes_size[qty] = 1;
}
/* Handle something which alters the value of an rtx REG.
@ -2108,6 +2119,12 @@ find_free_reg (class, mode, qty, accept_call_clobbered, just_try_suggested,
SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
#endif
#ifdef CLASS_CANNOT_CHANGE_SIZE
if (qty_changes_size[qty])
IOR_HARD_REG_SET (used,
reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE]);
#endif
/* Normally, the registers that can be used for the first register in
a multi-register quantity are the same as those that can be used for
subsequent registers. However, if just trying suggested registers,

View File

@ -1791,8 +1791,9 @@ move_movables (movables, threshold, insn_count, loop_start, end, nregs)
/* Because the USAGE information potentially
contains objects other than hard registers
we need to copy it. */
CALL_INSN_FUNCTION_USAGE (i1) =
copy_rtx (CALL_INSN_FUNCTION_USAGE (temp));
if (CALL_INSN_FUNCTION_USAGE (temp))
CALL_INSN_FUNCTION_USAGE (i1) =
copy_rtx (CALL_INSN_FUNCTION_USAGE (temp));
}
else
i1 = emit_insn_before (body, loop_start);
@ -1834,8 +1835,9 @@ move_movables (movables, threshold, insn_count, loop_start, end, nregs)
/* Because the USAGE information potentially
contains objects other than hard registers
we need to copy it. */
CALL_INSN_FUNCTION_USAGE (i1) =
copy_rtx (CALL_INSN_FUNCTION_USAGE (p));
if (CALL_INSN_FUNCTION_USAGE (p))
CALL_INSN_FUNCTION_USAGE (i1) =
copy_rtx (CALL_INSN_FUNCTION_USAGE (p));
}
else
i1 = emit_insn_before (PATTERN (p), loop_start);

View File

@ -31,7 +31,7 @@ Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
#if defined (_LIBC) || !defined (__GNU_LIBRARY__)
#ifdef __STDC__
#if defined (__STDC__) && __STDC__
#define POINTER void *
#else
#define POINTER char *
@ -267,7 +267,7 @@ _obstack_newchunk (h, length)
This is here for debugging.
If you use it in a program, you are probably losing. */
#ifdef __STDC__
#if defined (__STDC__) && __STDC__
/* Suppress -Wmissing-prototypes warning. We don't want to declare this in
obstack.h because it is just for debugging. */
int _obstack_allocated_p (struct obstack *h, POINTER obj);
@ -374,7 +374,7 @@ obstack_free (h, obj)
/* Now define the functional versions of the obstack macros.
Define them to simply use the corresponding macros to do the job. */
#ifdef __STDC__
#if defined (__STDC__) && __STDC__
/* These function definitions do not work with non-ANSI preprocessors;
they won't pass through the macro names in parentheses. */

View File

@ -117,6 +117,13 @@ rtx bcmp_libfunc;
rtx memset_libfunc;
rtx bzero_libfunc;
rtx eqhf2_libfunc;
rtx nehf2_libfunc;
rtx gthf2_libfunc;
rtx gehf2_libfunc;
rtx lthf2_libfunc;
rtx lehf2_libfunc;
rtx eqsf2_libfunc;
rtx nesf2_libfunc;
rtx gtsf2_libfunc;
@ -689,13 +696,6 @@ expand_binop (mode, binoptab, op0, op1, target, unsignedp, methods)
if (carries == 0)
inter = 0;
else
inter = expand_binop (word_mode, binoptab, outof_input,
op1, outof_target, unsignedp, next_methods);
if (inter != 0 && inter != outof_target)
emit_move_insn (outof_target, inter);
if (inter != 0)
inter = expand_binop (word_mode, unsigned_shift, into_input,
op1, 0, unsignedp, next_methods);
@ -705,6 +705,13 @@ expand_binop (mode, binoptab, op0, op1, target, unsignedp, methods)
if (inter != 0 && inter != into_target)
emit_move_insn (into_target, inter);
if (inter != 0)
inter = expand_binop (word_mode, binoptab, outof_input,
op1, outof_target, unsignedp, next_methods);
if (inter != 0 && inter != outof_target)
emit_move_insn (outof_target, inter);
}
insns = get_insns ();
@ -1260,8 +1267,10 @@ expand_binop (mode, binoptab, op0, op1, target, unsignedp, methods)
NULL_RTX, unsignedp, methods);
if (temp1 == 0 || temp2 == 0)
res = expand_binop (submode, add_optab, temp1, temp2,
imagr, unsignedp, methods);
break;
res = expand_binop (submode, add_optab, temp1, temp2,
imagr, unsignedp, methods);
if (res == 0)
break;
@ -2760,7 +2769,34 @@ emit_float_lib_cmp (x, y, comparison)
enum machine_mode mode = GET_MODE (x);
rtx libfunc = 0;
if (mode == SFmode)
if (mode == HFmode)
switch (comparison)
{
case EQ:
libfunc = eqhf2_libfunc;
break;
case NE:
libfunc = nehf2_libfunc;
break;
case GT:
libfunc = gthf2_libfunc;
break;
case GE:
libfunc = gehf2_libfunc;
break;
case LT:
libfunc = lthf2_libfunc;
break;
case LE:
libfunc = lehf2_libfunc;
break;
}
else if (mode == SFmode)
switch (comparison)
{
case EQ:
@ -3237,8 +3273,11 @@ expand_float (to, from, unsignedp)
expand_float (target, temp, 0);
/* Multiply by 2 to undo the shift above. */
target = expand_binop (fmode, add_optab, target, target,
temp = expand_binop (fmode, add_optab, target, target,
target, 0, OPTAB_LIB_WIDEN);
if (temp != target)
emit_move_insn (target, temp);
do_pending_stack_adjust ();
emit_label (label);
goto done;
@ -3278,7 +3317,7 @@ expand_float (to, from, unsignedp)
}
#endif
/* No hardware instruction available; call a library rotine to convert from
/* No hardware instruction available; call a library routine to convert from
SImode, DImode, or TImode into SFmode, DFmode, XFmode, or TFmode. */
{
rtx libfcn;
@ -3562,6 +3601,7 @@ expand_fix (to, from, unsignedp)
if (libfcn)
{
rtx insns;
rtx value;
to = protect_from_queue (to, 1);
from = protect_from_queue (from, 0);
@ -3571,12 +3611,14 @@ expand_fix (to, from, unsignedp)
start_sequence ();
emit_library_call (libfcn, 1, GET_MODE (to), 1, from, GET_MODE (from));
value = emit_library_call_value (libfcn, NULL_RTX, 1, GET_MODE (to),
1, from, GET_MODE (from));
insns = get_insns ();
end_sequence ();
emit_libcall_block (insns, target, hard_libcall_value (GET_MODE (to)),
gen_rtx (unsignedp ? FIX : UNSIGNED_FIX,
emit_libcall_block (insns, target, value,
gen_rtx (unsignedp ? UNSIGNED_FIX : FIX,
GET_MODE (to), from));
}
@ -3957,6 +3999,13 @@ init_optabs ()
memset_libfunc = gen_rtx (SYMBOL_REF, Pmode, "memset");
bzero_libfunc = gen_rtx (SYMBOL_REF, Pmode, "bzero");
eqhf2_libfunc = gen_rtx (SYMBOL_REF, Pmode, "__eqhf2");
nehf2_libfunc = gen_rtx (SYMBOL_REF, Pmode, "__nehf2");
gthf2_libfunc = gen_rtx (SYMBOL_REF, Pmode, "__gthf2");
gehf2_libfunc = gen_rtx (SYMBOL_REF, Pmode, "__gehf2");
lthf2_libfunc = gen_rtx (SYMBOL_REF, Pmode, "__lthf2");
lehf2_libfunc = gen_rtx (SYMBOL_REF, Pmode, "__lehf2");
eqsf2_libfunc = gen_rtx (SYMBOL_REF, Pmode, "__eqsf2");
nesf2_libfunc = gen_rtx (SYMBOL_REF, Pmode, "__nesf2");
gtsf2_libfunc = gen_rtx (SYMBOL_REF, Pmode, "__gtsf2");

View File

@ -109,10 +109,22 @@ print_node_brief (file, prefix, node, indent)
fprintf (file, " overflow");
if (TREE_INT_CST_HIGH (node) == 0)
fprintf (file, " %1u", TREE_INT_CST_LOW (node));
fprintf (file,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
" %1u",
#else
" %1lu",
#endif
TREE_INT_CST_LOW (node));
else if (TREE_INT_CST_HIGH (node) == -1
&& TREE_INT_CST_LOW (node) != 0)
fprintf (file, " -%1u", -TREE_INT_CST_LOW (node));
fprintf (file,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
" -%1u",
#else
" -%1lu",
#endif
-TREE_INT_CST_LOW (node));
else
fprintf (file,
#if HOST_BITS_PER_WIDE_INT == 64
@ -132,8 +144,24 @@ print_node_brief (file, prefix, node, indent)
}
if (TREE_CODE (node) == REAL_CST)
{
#ifndef REAL_IS_NOT_DOUBLE
fprintf (file, " %e", TREE_REAL_CST (node));
REAL_VALUE_TYPE d;
if (TREE_OVERFLOW (node))
fprintf (file, " overflow");
#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
d = TREE_REAL_CST (node);
if (REAL_VALUE_ISINF (d))
fprintf (file, " Inf");
else if (REAL_VALUE_ISNAN (d))
fprintf (file, " Nan");
else
{
char string[100];
REAL_VALUE_TO_DECIMAL (d, "%e", string);
fprintf (file, " %s", string);
}
#else
{
int i;
@ -143,7 +171,7 @@ print_node_brief (file, prefix, node, indent)
fprintf (file, "%02x", *p++);
fprintf (file, "");
}
#endif /* REAL_IS_NOT_DOUBLE */
#endif
}
fprintf (file, ">");
@ -324,22 +352,47 @@ print_node (file, prefix, node, indent)
case 'd':
mode = DECL_MODE (node);
if (DECL_EXTERNAL (node))
fputs (" external", file);
if (DECL_NONLOCAL (node))
fputs (" nonlocal", file);
if (DECL_REGISTER (node))
fputs (" regdecl", file);
if (DECL_INLINE (node))
fputs (" inline", file);
if (DECL_BIT_FIELD (node))
fputs (" bit-field", file);
if (DECL_VIRTUAL_P (node))
fputs (" virtual", file);
if (DECL_IGNORED_P (node))
fputs (" ignored", file);
if (DECL_ABSTRACT (node))
fputs (" abstract", file);
if (DECL_IN_SYSTEM_HEADER (node))
fputs (" in_system_header", file);
if (DECL_COMMON (node))
fputs (" common", file);
if (DECL_EXTERNAL (node))
fputs (" external", file);
if (DECL_REGISTER (node))
fputs (" regdecl", file);
if (DECL_PACKED (node))
fputs (" packed", file);
if (DECL_NONLOCAL (node))
fputs (" nonlocal", file);
if (DECL_INLINE (node))
fputs (" inline", file);
if (TREE_CODE (node) == TYPE_DECL && TYPE_DECL_SUPPRESS_DEBUG (node))
fputs (" supress-debug", file);
if (TREE_CODE (node) == FUNCTION_DECL && DECL_BUILT_IN (node))
fputs (" built-in", file);
if (TREE_CODE (node) == FUNCTION_DECL && DECL_BUILT_IN_NONANSI (node))
fputs (" built-in-nonansi", file);
if (TREE_CODE (node) == FIELD_DECL && DECL_BIT_FIELD (node))
fputs (" bit-field", file);
if (TREE_CODE (node) == LABEL_DECL && DECL_TOO_LATE (node))
fputs (" too-late", file);
if (TREE_CODE (node) == VAR_DECL && DECL_IN_TEXT_SECTION (node))
fputs (" in-text-section", file);
if (DECL_VIRTUAL_P (node))
fputs (" virtual", file);
if (DECL_DEFER_OUTPUT (node))
fputs (" defer-output", file);
if (DECL_TRANSPARENT_UNION (node))
fputs (" transparent-union", file);
if (DECL_LANG_FLAG_0 (node))
fputs (" decl_0", file);
if (DECL_LANG_FLAG_1 (node))
@ -413,7 +466,14 @@ print_node (file, prefix, node, indent)
case 't':
if (TYPE_NO_FORCE_BLK (node))
fputs (" no_force_blk", file);
fputs (" no-force-blk", file);
if (TYPE_STRING_FLAG (node))
fputs (" string-flag", file);
if (TYPE_NEEDS_CONSTRUCTING (node))
fputs (" needs-constructing", file);
if (TYPE_TRANSPARENT_UNION (node))
fputs (" transparent-union", file);
if (TYPE_LANG_FLAG_0 (node))
fputs (" type_0", file);
if (TYPE_LANG_FLAG_1 (node))
@ -558,10 +618,22 @@ print_node (file, prefix, node, indent)
fprintf (file, " overflow");
if (TREE_INT_CST_HIGH (node) == 0)
fprintf (file, " %1u", TREE_INT_CST_LOW (node));
fprintf (file,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
" %1u",
#else
" %1lu",
#endif
TREE_INT_CST_LOW (node));
else if (TREE_INT_CST_HIGH (node) == -1
&& TREE_INT_CST_LOW (node) != 0)
fprintf (file, " -%1u", -TREE_INT_CST_LOW (node));
fprintf (file,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
" -%1u",
#else
" -%1lu",
#endif
-TREE_INT_CST_LOW (node));
else
fprintf (file,
#if HOST_BITS_PER_WIDE_INT == 64
@ -581,17 +653,36 @@ print_node (file, prefix, node, indent)
break;
case REAL_CST:
#ifndef REAL_IS_NOT_DOUBLE
fprintf (file, " %e", TREE_REAL_CST (node));
#else
{
char *p = (char *) &TREE_REAL_CST (node);
fprintf (file, " 0x");
for (i = 0; i < sizeof TREE_REAL_CST (node); i++)
fprintf (file, "%02x", *p++);
fprintf (file, "");
REAL_VALUE_TYPE d;
if (TREE_OVERFLOW (node))
fprintf (file, " overflow");
#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
d = TREE_REAL_CST (node);
if (REAL_VALUE_ISINF (d))
fprintf (file, " Inf");
else if (REAL_VALUE_ISNAN (d))
fprintf (file, " Nan");
else
{
char string[100];
REAL_VALUE_TO_DECIMAL (d, "%e", string);
fprintf (file, " %s", string);
}
#else
{
int i;
unsigned char *p = (unsigned char *) &TREE_REAL_CST (node);
fprintf (file, " 0x");
for (i = 0; i < sizeof TREE_REAL_CST (node); i++)
fprintf (file, "%02x", *p++);
fprintf (file, "");
}
#endif
}
#endif /* REAL_IS_NOT_DOUBLE */
break;
case COMPLEX_CST:

View File

@ -1718,9 +1718,9 @@ constrain_operands (insn_code_num, strict)
case 'p':
/* p is used for address_operands. When we are called by
gen_input_reload, no one will have checked that the
address is strictly valid, i.e., that all pseudos
requiring hard regs have gotten them. */
gen_reload, no one will have checked that the address is
strictly valid, i.e., that all pseudos requiring hard regs
have gotten them. */
if (strict <= 0
|| (strict_memory_address_p
(insn_operand_mode[insn_code_num][opno], op)))

View File

@ -1591,7 +1591,6 @@ delete_insn_for_stacker (insn)
PUT_CODE (insn, NOTE);
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
NOTE_SOURCE_FILE (insn) = 0;
INSN_DELETED_P (insn) = 1;
}
/* Emit an insn to pop virtual register REG before or after INSN.
@ -2581,7 +2580,7 @@ subst_stack_regs (insn, regstack)
/* subst_stack_regs_pat may have deleted a no-op insn. If so, any
REG_UNUSED will already have been dealt with, so just return. */
if (INSN_DELETED_P (insn))
if (GET_CODE (insn) == NOTE)
return;
/* If there is a REG_UNUSED note on a stack register on this insn,

View File

@ -86,6 +86,7 @@ a register with any other reload. */
#define REG_OK_STRICT
#include <stdio.h>
#include "config.h"
#include "rtl.h"
#include "insn-config.h"
@ -777,8 +778,8 @@ push_reload (in, out, inloc, outloc, class,
a pseudo and hence will become a MEM) with M1 wider than M2 and the
register is a pseudo, also reload the inside expression.
For machines that extend byte loads, do this for any SUBREG of a pseudo
where both M1 and M2 are a word or smaller unless they are the same
size.
where both M1 and M2 are a word or smaller, M1 is wider than M2, and
M2 is an integral mode that gets extended when loaded.
Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
either M1 is not valid for R or M2 is wider than a word but we only
need one word to store an M2-sized quantity in R.
@ -792,7 +793,11 @@ push_reload (in, out, inloc, outloc, class,
STRICT_LOW_PART (presumably, in == out in the cas).
Also reload the inner expression if it does not require a secondary
reload but the SUBREG does. */
reload but the SUBREG does.
Finally, reload the inner expression if it is a register that is in
the class whose registers cannot be referenced in a different size
and M1 is not the same size as M2. */
if (in != 0 && GET_CODE (in) == SUBREG
&& (CONSTANT_P (SUBREG_REG (in))
@ -808,7 +813,9 @@ push_reload (in, out, inloc, outloc, class,
&& (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
<= UNITS_PER_WORD)
&& (GET_MODE_SIZE (inmode)
!= GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))))
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
&& INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
&& LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != NIL)
#endif
))
|| (GET_CODE (SUBREG_REG (in)) == REG
@ -832,6 +839,15 @@ push_reload (in, out, inloc, outloc, class,
GET_MODE (SUBREG_REG (in)),
SUBREG_REG (in))
== NO_REGS))
#endif
#ifdef CLASS_CANNOT_CHANGE_SIZE
|| (GET_CODE (SUBREG_REG (in)) == REG
&& REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
&& (TEST_HARD_REG_BIT
(reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
REGNO (SUBREG_REG (in))))
&& (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
!= GET_MODE_SIZE (inmode)))
#endif
))
{
@ -885,15 +901,7 @@ push_reload (in, out, inloc, outloc, class,
&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
|| GET_CODE (SUBREG_REG (out)) == MEM)
&& ((GET_MODE_SIZE (outmode)
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
#ifdef LOAD_EXTEND_OP
|| (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
&& (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
<= UNITS_PER_WORD)
&& (GET_MODE_SIZE (outmode)
!= GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))))
#endif
))
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))))
|| (GET_CODE (SUBREG_REG (out)) == REG
&& REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
&& ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
@ -912,6 +920,15 @@ push_reload (in, out, inloc, outloc, class,
GET_MODE (SUBREG_REG (out)),
SUBREG_REG (out))
== NO_REGS))
#endif
#ifdef CLASS_CANNOT_CHANGE_SIZE
|| (GET_CODE (SUBREG_REG (out)) == REG
&& REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
&& (TEST_HARD_REG_BIT
(reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
REGNO (SUBREG_REG (out))))
&& (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
!= GET_MODE_SIZE (outmode)))
#endif
))
{
@ -1881,7 +1898,7 @@ operands_match_p (x, y)
int
n_occurrences (c, s)
char c;
int c;
char *s;
{
int n = 0;
@ -2401,7 +2418,7 @@ find_reloads (insn, replace, ind_levels, live_known, reload_reg_p)
&& &SET_DEST (set) == recog_operand_loc[i]);
else if (code == PLUS)
/* We can get a PLUS as an "operand" as a result of
register elimination. See eliminate_regs and gen_input_reload. */
register elimination. See eliminate_regs and gen_reload. */
substed_operand[i] = recog_operand[i] = *recog_operand_loc[i]
= find_reloads_toplev (recog_operand[i], i, address_type[i],
ind_levels, 0);
@ -2550,12 +2567,15 @@ find_reloads (insn, replace, ind_levels, live_known, reload_reg_p)
|| GET_CODE (operand) == PLUS
/* We must force a reload of paradoxical SUBREGs
of a MEM because the alignment of the inner value
may not be enough to do the outer reference.
may not be enough to do the outer reference. On
big-endian machines, it may also reference outside
the object.
On machines that extend byte operations and we have a
SUBREG where both the inner and outer modes are different
size but no wider than a word, combine.c has made
assumptions about the behavior of the machine in such
SUBREG where both the inner and outer modes are no wider
than a word and the inner mode is narrower, is integral,
and gets extended when loaded from memory, combine.c has
made assumptions about the behavior of the machine in such
register access. If the data is, in fact, in memory we
must always load using the size assumed to be in the
register and let the insn do the different-sized
@ -2567,12 +2587,15 @@ find_reloads (insn, replace, ind_levels, live_known, reload_reg_p)
< BIGGEST_ALIGNMENT)
&& (GET_MODE_SIZE (operand_mode[i])
> GET_MODE_SIZE (GET_MODE (operand))))
|| (GET_CODE (operand) == MEM && BYTES_BIG_ENDIAN)
#ifdef LOAD_EXTEND_OP
|| (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
&& (GET_MODE_SIZE (GET_MODE (operand))
<= UNITS_PER_WORD)
&& (GET_MODE_SIZE (operand_mode[i])
!= GET_MODE_SIZE (GET_MODE (operand))))
> GET_MODE_SIZE (GET_MODE (operand)))
&& INTEGRAL_MODE_P (GET_MODE (operand))
&& LOAD_EXTEND_OP (GET_MODE (operand)) != NIL)
#endif
))
/* Subreg of a hard reg which can't handle the subreg's mode
@ -2708,7 +2731,7 @@ find_reloads (insn, replace, ind_levels, live_known, reload_reg_p)
case 'p':
/* All necessary reloads for an address_operand
were handled in find_reloads_address. */
this_alternative[i] = (int) ALL_REGS;
this_alternative[i] = (int) BASE_REG_CLASS;
win = 1;
break;
@ -4233,12 +4256,12 @@ find_reloads_address (mode, memrefloc, ad, loc, opnum, type, ind_levels)
&& ! memory_address_p (mode, ad))
{
*loc = ad = gen_rtx (PLUS, GET_MODE (ad),
XEXP (XEXP (ad, 0), 0),
plus_constant (XEXP (XEXP (ad, 0), 1),
INTVAL (XEXP (ad, 1))),
XEXP (XEXP (ad, 0), 0));
find_reloads_address_part (XEXP (ad, 0), &XEXP (ad, 0), BASE_REG_CLASS,
INTVAL (XEXP (ad, 1))));
find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1), BASE_REG_CLASS,
GET_MODE (ad), opnum, type, ind_levels);
find_reloads_address_1 (XEXP (ad, 1), 1, &XEXP (ad, 1), opnum, type, 0);
find_reloads_address_1 (XEXP (ad, 0), 1, &XEXP (ad, 0), opnum, type, 0);
return 1;
}
@ -5648,3 +5671,110 @@ regno_clobbered_p (regno, insn)
return 0;
}
static char *reload_when_needed_name[] =
{
"RELOAD_FOR_INPUT",
"RELOAD_FOR_OUTPUT",
"RELOAD_FOR_INSN",
"RELOAD_FOR_INPUT_ADDRESS",
"RELOAD_FOR_OUTPUT_ADDRESS",
"RELOAD_FOR_OPERAND_ADDRESS",
"RELOAD_FOR_OPADDR_ADDR",
"RELOAD_OTHER",
"RELOAD_FOR_OTHER_ADDRESS"
};
static char *reg_class_names[] = REG_CLASS_NAMES;
/* This function is used to print the variables set by 'find_reloads' */
void
debug_reload()
{
int r;
fprintf (stderr, "\nn_reloads = %d\n", n_reloads);
for (r = 0; r < n_reloads; r++)
{
fprintf (stderr, "\nRELOAD %d\n", r);
if (reload_in[r])
{
fprintf (stderr, "\nreload_in (%s) = ", mode_name[reload_inmode[r]]);
debug_rtx (reload_in[r]);
}
if (reload_out[r])
{
fprintf (stderr, "\nreload_out (%s) = ", mode_name[reload_outmode[r]]);
debug_rtx (reload_out[r]);
}
fprintf (stderr, "%s, ", reg_class_names[(int) reload_reg_class[r]]);
fprintf (stderr, "%s (opnum = %d)", reload_when_needed_name[(int)reload_when_needed[r]],
reload_opnum[r]);
if (reload_optional[r])
fprintf (stderr, ", optional");
if (reload_in[r])
fprintf (stderr, ", inc by %d\n", reload_inc[r]);
if (reload_nocombine[r])
fprintf (stderr, ", can combine", reload_nocombine[r]);
if (reload_secondary_p[r])
fprintf (stderr, ", secondary_reload_p");
if (reload_in_reg[r])
{
fprintf (stderr, "\nreload_in_reg:\t\t\t");
debug_rtx (reload_in_reg[r]);
}
if (reload_reg_rtx[r])
{
fprintf (stderr, "\nreload_reg_rtx:\t\t\t");
debug_rtx (reload_reg_rtx[r]);
}
if (reload_secondary_in_reload[r] != -1)
{
fprintf (stderr, "\nsecondary_in_reload = ");
fprintf (stderr, "%d ", reload_secondary_in_reload[r]);
}
if (reload_secondary_out_reload[r] != -1)
{
if (reload_secondary_in_reload[r] != -1)
fprintf (stderr, ", secondary_out_reload = ");
else
fprintf (stderr, "\nsecondary_out_reload = ");
fprintf (stderr, "%d", reload_secondary_out_reload[r]);
}
if (reload_secondary_in_icode[r] != CODE_FOR_nothing)
{
fprintf (stderr, "\nsecondary_in_icode = ");
fprintf (stderr, "%s", insn_name[r]);
}
if (reload_secondary_out_icode[r] != CODE_FOR_nothing)
{
if (reload_secondary_in_icode[r] != CODE_FOR_nothing)
fprintf (stderr, ", secondary_out_icode = ");
else
fprintf (stderr, "\nsecondary_out_icode = ");
fprintf (stderr, "%s ", insn_name[r]);
}
fprintf (stderr, "\n");
}
fprintf (stderr, "\n");
}

View File

@ -327,7 +327,7 @@ struct hard_reg_n_uses { int regno; int uses; };
static int possible_group_p PROTO((int, int *));
static void count_possible_groups PROTO((int *, enum machine_mode *,
int *));
int *, int));
static int modes_equiv_for_class_p PROTO((enum machine_mode,
enum machine_mode,
enum reg_class));
@ -1153,7 +1153,8 @@ reload (first, global, dumpfile)
if (other_mode != VOIDmode && other_mode != allocate_mode
&& ! modes_equiv_for_class_p (allocate_mode,
other_mode, class))
abort ();
fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
insn);
}
else if (size == 1)
{
@ -1349,19 +1350,14 @@ reload (first, global, dumpfile)
}
/* Now count extra regs if there might be a conflict with
the return value register.
the return value register. */
??? This is not quite correct because we don't properly
handle the case of groups, but if we end up doing
something wrong, it either will end up not mattering or
we will abort elsewhere. */
for (r = regno; r < regno + nregs; r++)
if (spill_reg_order[r] >= 0)
for (i = 0; i < N_REG_CLASSES; i++)
if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
{
if (basic_needs[i] > 0 || basic_groups[i] > 0)
if (basic_needs[i] > 0)
{
enum reg_class *p;
@ -1370,6 +1366,15 @@ reload (first, global, dumpfile)
while (*p != LIM_REG_CLASSES)
insn_needs.other.regs[0][(int) *p++]++;
}
if (basic_groups[i] > 0)
{
enum reg_class *p;
insn_needs.other.groups[i]++;
p = reg_class_superclasses[i];
while (*p != LIM_REG_CLASSES)
insn_needs.other.groups[(int) *p++]++;
}
}
}
#endif /* SMALL_REGISTER_CLASSES */
@ -1603,7 +1608,8 @@ reload (first, global, dumpfile)
/* If any single spilled regs happen to form groups,
count them now. Maybe we don't really need
to spill another group. */
count_possible_groups (group_size, group_mode, max_groups);
count_possible_groups (group_size, group_mode, max_groups,
class);
if (max_groups[class] <= 0)
break;
@ -2063,68 +2069,65 @@ possible_group_p (regno, max_groups)
return 0;
}
/* Count any groups that can be formed from the registers recently spilled.
This is done class by class, in order of ascending class number. */
/* Count any groups of CLASS that can be formed from the registers recently
spilled. */
static void
count_possible_groups (group_size, group_mode, max_groups)
count_possible_groups (group_size, group_mode, max_groups, class)
int *group_size;
enum machine_mode *group_mode;
int *max_groups;
int class;
{
int i;
HARD_REG_SET new;
int i, j;
/* Now find all consecutive groups of spilled registers
and mark each group off against the need for such groups.
But don't count them against ordinary need, yet. */
for (i = 0; i < N_REG_CLASSES; i++)
if (group_size[i] > 1)
if (group_size[class] == 0)
return;
CLEAR_HARD_REG_SET (new);
/* Make a mask of all the regs that are spill regs in class I. */
for (i = 0; i < n_spills; i++)
if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
&& ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
&& ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
SET_HARD_REG_BIT (new, spill_regs[i]);
/* Find each consecutive group of them. */
for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
if (TEST_HARD_REG_BIT (new, i)
&& i + group_size[class] <= FIRST_PSEUDO_REGISTER
&& HARD_REGNO_MODE_OK (i, group_mode[class]))
{
HARD_REG_SET new;
int j;
for (j = 1; j < group_size[class]; j++)
if (! TEST_HARD_REG_BIT (new, i + j))
break;
CLEAR_HARD_REG_SET (new);
if (j == group_size[class])
{
/* We found a group. Mark it off against this class's need for
groups, and against each superclass too. */
register enum reg_class *p;
/* Make a mask of all the regs that are spill regs in class I. */
for (j = 0; j < n_spills; j++)
if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
&& ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
&& ! TEST_HARD_REG_BIT (counted_for_nongroups,
spill_regs[j]))
SET_HARD_REG_BIT (new, spill_regs[j]);
max_groups[class]--;
p = reg_class_superclasses[class];
while (*p != LIM_REG_CLASSES)
max_groups[(int) *p++]--;
/* Find each consecutive group of them. */
for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
if (TEST_HARD_REG_BIT (new, j)
&& j + group_size[i] <= FIRST_PSEUDO_REGISTER
/* Next line in case group-mode for this class
demands an even-odd pair. */
&& HARD_REGNO_MODE_OK (j, group_mode[i]))
{
int k;
for (k = 1; k < group_size[i]; k++)
if (! TEST_HARD_REG_BIT (new, j + k))
break;
if (k == group_size[i])
{
/* We found a group. Mark it off against this class's
need for groups, and against each superclass too. */
register enum reg_class *p;
max_groups[i]--;
p = reg_class_superclasses[i];
while (*p != LIM_REG_CLASSES)
max_groups[(int) *p++]--;
/* Don't count these registers again. */
for (k = 0; k < group_size[i]; k++)
SET_HARD_REG_BIT (counted_for_groups, j + k);
}
/* Skip to the last reg in this group. When j is incremented
above, it will then point to the first reg of the next
possible group. */
j += k - 1;
}
/* Don't count these registers again. */
for (j = 0; j < group_size[class]; j++)
SET_HARD_REG_BIT (counted_for_groups, i + j);
}
/* Skip to the last reg in this group. When i is incremented above,
it will then point to the first reg of the next possible group. */
i += j - 1;
}
}
/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
@ -2165,7 +2168,7 @@ spill_failure (insn)
if (asm_noperands (PATTERN (insn)) >= 0)
error_for_asm (insn, "`asm' needs too many reloads");
else
abort ();
fatal_insn ("Unable to find a register to spill.", insn);
}
/* Add a new register to the tables of available spill-registers
@ -2886,11 +2889,20 @@ eliminate_regs (x, mem_mode, insn)
/* If we didn't change anything, we must retain the pseudo. */
if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
new = XEXP (x, 0);
new = SUBREG_REG (x);
else
/* Otherwise, ensure NEW isn't shared in case we have to reload
it. */
new = copy_rtx (new);
{
/* Otherwise, ensure NEW isn't shared in case we have to reload
it. */
new = copy_rtx (new);
/* In this case, we must show that the pseudo is used in this
insn so that delete_output_reload will do the right thing. */
if (insn != 0 && GET_CODE (insn) != EXPR_LIST
&& GET_CODE (insn) != INSN_LIST)
emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
insn);
}
}
else
new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
@ -2907,7 +2919,11 @@ eliminate_regs (x, mem_mode, insn)
smaller. So leave the SUBREG then. */
&& ! (GET_CODE (SUBREG_REG (x)) == REG
&& GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
&& GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
&& GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
&& (GET_MODE_SIZE (GET_MODE (x))
> GET_MODE_SIZE (GET_MODE (new)))
&& INTEGRAL_MODE_P (GET_MODE (new))
&& LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
#endif
)
{
@ -3668,11 +3684,12 @@ order_regs_for_reload ()
}
/* Used in reload_as_needed to sort the spilled regs. */
static int
compare_spill_regs (r1, r2)
short *r1, *r2;
{
return *r1 < *r2 ? -1: 1;
return *r1 - *r2;
}
/* Reload pseudo-registers into hard regs around each insn as needed.
@ -3840,7 +3857,7 @@ reload_as_needed (first, live_known)
&& ! reload_optional[i]
&& (reload_in[i] != 0 || reload_out[i] != 0
|| reload_secondary_p[i] != 0))
abort ();
fatal_insn ("Non-optional registers need a spill register", insn);
/* Now compute which reload regs to reload them into. Perhaps
reusing reload regs from previous insns, or else output
@ -4836,7 +4853,7 @@ allocate_reload_reg (r, insn, last_reload, noerror)
failure:
if (asm_noperands (PATTERN (insn)) < 0)
/* It's the compiler's fault. */
abort ();
fatal_insn ("Could not find a spill register", insn);
/* It's the user's fault; the operand's mode and constraint
don't match. Disable this reload so we don't crash in final. */
@ -5628,7 +5645,9 @@ emit_reload_insns (insn)
{
register rtx old;
rtx oldequiv_reg = 0;
rtx store_insn = 0;
if (reload_spill_index[j] >= 0)
new_spill_reg_store[reload_spill_index[j]] = 0;
old = reload_in[j];
if (old != 0 && ! reload_inherited[j]
@ -6010,9 +6029,9 @@ emit_reload_insns (insn)
third_reload_reg)));
}
else
gen_input_reload (second_reload_reg, oldequiv,
reload_opnum[j],
reload_when_needed[j]);
gen_reload (second_reload_reg, oldequiv,
reload_opnum[j],
reload_when_needed[j]);
oldequiv = second_reload_reg;
}
@ -6021,8 +6040,8 @@ emit_reload_insns (insn)
#endif
if (! special && ! rtx_equal_p (reloadreg, oldequiv))
gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
reload_when_needed[j]);
gen_reload (reloadreg, oldequiv, reload_opnum[j],
reload_when_needed[j]);
#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
/* We may have to make a REG_DEAD note for the secondary reload
@ -6246,7 +6265,7 @@ emit_reload_insns (insn)
/* VOIDmode should never happen for an output. */
if (asm_noperands (PATTERN (insn)) < 0)
/* It's the compiler's fault. */
abort ();
fatal_insn ("VOIDmode on an output", insn);
error_for_asm (insn, "output operand is constant in `asm'");
/* Prevent crash--use something we know is valid. */
mode = word_mode;
@ -6259,7 +6278,7 @@ emit_reload_insns (insn)
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
/* If we need two reload regs, set RELOADREG to the intermediate
one, since it will be stored into OUT. We might need a secondary
one, since it will be stored into OLD. We might need a secondary
register only for an input reload, so check again here. */
if (reload_secondary_out_reload[j] >= 0)
@ -6289,10 +6308,10 @@ emit_reload_insns (insn)
{
/* See if we need both a scratch and intermediate reload
register. */
int secondary_reload = reload_secondary_out_reload[j];
enum insn_code tertiary_icode
= reload_secondary_out_icode[secondary_reload];
rtx pat;
if (GET_MODE (reloadreg) != mode)
reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
@ -6301,44 +6320,24 @@ emit_reload_insns (insn)
{
rtx third_reloadreg
= reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
pat = (GEN_FCN (tertiary_icode)
(reloadreg, second_reloadreg, third_reloadreg));
}
#ifdef SECONDARY_MEMORY_NEEDED
/* If we need a memory location to do the move, do it that way. */
else if (GET_CODE (reloadreg) == REG
&& REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
&& SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
REGNO_REG_CLASS (REGNO (second_reloadreg)),
GET_MODE (second_reloadreg)))
{
/* Get the memory to use and rewrite both registers
to its mode. */
rtx loc
= get_secondary_mem (reloadreg,
GET_MODE (second_reloadreg),
reload_opnum[j],
reload_when_needed[j]);
rtx tmp_reloadreg;
if (GET_MODE (loc) != GET_MODE (second_reloadreg))
second_reloadreg = gen_rtx (REG, GET_MODE (loc),
REGNO (second_reloadreg));
if (GET_MODE (loc) != GET_MODE (reloadreg))
tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
REGNO (reloadreg));
else
tmp_reloadreg = reloadreg;
emit_move_insn (loc, second_reloadreg);
pat = gen_move_insn (tmp_reloadreg, loc);
}
#endif
else
pat = gen_move_insn (reloadreg, second_reloadreg);
emit_insn (pat);
/* Copy primary reload reg to secondary reload reg.
(Note that these have been swapped above, then
secondary reload reg to OLD using our insn. */
gen_reload (reloadreg, second_reloadreg,
reload_opnum[j], reload_when_needed[j]);
emit_insn ((GEN_FCN (tertiary_icode)
(real_old, reloadreg, third_reloadreg)));
special = 1;
}
else
/* Copy between the reload regs here and then to
OUT later. */
gen_reload (reloadreg, second_reloadreg,
reload_opnum[j], reload_when_needed[j]);
}
}
}
@ -6346,34 +6345,8 @@ emit_reload_insns (insn)
/* Output the last reload insn. */
if (! special)
{
#ifdef SECONDARY_MEMORY_NEEDED
/* If we need a memory location to do the move, do it that way. */
if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
&& SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
REGNO_REG_CLASS (REGNO (reloadreg)),
GET_MODE (reloadreg)))
{
/* Get the memory to use and rewrite both registers to
its mode. */
rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
reload_opnum[j],
reload_when_needed[j]);
if (GET_MODE (loc) != GET_MODE (reloadreg))
reloadreg = gen_rtx (REG, GET_MODE (loc),
REGNO (reloadreg));
if (GET_MODE (loc) != GET_MODE (old))
old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
emit_insn (gen_move_insn (loc, reloadreg));
emit_insn (gen_move_insn (old, loc));
}
else
#endif
emit_insn (gen_move_insn (old, reloadreg));
}
gen_reload (old, reloadreg, reload_opnum[j],
reload_when_needed[j]);
#ifdef PRESERVE_DEATH_INFO_REGNO_P
/* If final will look at death notes for this reg,
@ -6408,17 +6381,14 @@ emit_reload_insns (insn)
reg_has_output_reload will make this do nothing. */
note_stores (PATTERN (p), forget_old_reloads_1);
if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
store_insn = p;
if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
&& reload_spill_index[j] >= 0)
new_spill_reg_store[reload_spill_index[j]] = p;
}
output_reload_insns[reload_opnum[j]] = get_insns ();
end_sequence ();
}
if (reload_spill_index[j] >= 0)
new_spill_reg_store[reload_spill_index[j]] = store_insn;
}
/* Now write all the insns we made for reloads in the order expected by
@ -6640,14 +6610,15 @@ emit_reload_insns (insn)
}
}
/* Emit code to perform an input reload of IN to RELOADREG. IN is from
operand OPNUM with reload type TYPE.
/* Emit code to perform a reload from IN (which may be a reload register) to
OUT (which may also be a reload register). IN or OUT is from operand
OPNUM with reload type TYPE.
Returns first insn emitted. */
rtx
gen_input_reload (reloadreg, in, opnum, type)
rtx reloadreg;
gen_reload (out, in, opnum, type)
rtx out;
rtx in;
int opnum;
enum reload_type type;
@ -6714,13 +6685,13 @@ gen_input_reload (reloadreg, in, opnum, type)
it will be A = A + B as constrain_operands expects. */
if (GET_CODE (XEXP (in, 1)) == REG
&& REGNO (reloadreg) == REGNO (XEXP (in, 1)))
&& REGNO (out) == REGNO (XEXP (in, 1)))
tem = op0, op0 = op1, op1 = tem;
if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
code = recog_memoized (insn);
if (code >= 0)
@ -6749,16 +6720,16 @@ gen_input_reload (reloadreg, in, opnum, type)
&& REGNO (op1) >= FIRST_PSEUDO_REGISTER))
tem = op0, op0 = op1, op1 = tem;
emit_insn (gen_move_insn (reloadreg, op0));
emit_insn (gen_move_insn (out, op0));
/* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
/* If OP0 and OP1 are the same, we can use OUT for OP1.
This fixes a problem on the 32K where the stack pointer cannot
be used as an operand of an add insn. */
if (rtx_equal_p (op0, op1))
op1 = reloadreg;
op1 = out;
insn = emit_insn (gen_add2_insn (reloadreg, op1));
insn = emit_insn (gen_add2_insn (out, op1));
/* If that failed, copy the address register to the reload register.
Then add the constant to the reload register. */
@ -6777,43 +6748,44 @@ gen_input_reload (reloadreg, in, opnum, type)
delete_insns_since (last);
emit_insn (gen_move_insn (reloadreg, op1));
emit_insn (gen_add2_insn (reloadreg, op0));
emit_insn (gen_move_insn (out, op1));
emit_insn (gen_add2_insn (out, op0));
}
#ifdef SECONDARY_MEMORY_NEEDED
/* If we need a memory location to do the move, do it that way. */
else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
&& GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
&& SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
REGNO_REG_CLASS (REGNO (reloadreg)),
GET_MODE (reloadreg)))
REGNO_REG_CLASS (REGNO (out)),
GET_MODE (out)))
{
/* Get the memory to use and rewrite both registers to its mode. */
rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
if (GET_MODE (loc) != GET_MODE (reloadreg))
reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
if (GET_MODE (loc) != GET_MODE (out))
out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
if (GET_MODE (loc) != GET_MODE (in))
in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
emit_insn (gen_move_insn (loc, in));
emit_insn (gen_move_insn (reloadreg, loc));
emit_insn (gen_move_insn (out, loc));
}
#endif
/* If IN is a simple operand, use gen_move_insn. */
else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
emit_insn (gen_move_insn (reloadreg, in));
emit_insn (gen_move_insn (out, in));
#ifdef HAVE_reload_load_address
else if (HAVE_reload_load_address)
emit_insn (gen_reload_load_address (reloadreg, in));
emit_insn (gen_reload_load_address (out, in));
#endif
/* Otherwise, just write (set REGLOADREG IN) and hope for the best. */
/* Otherwise, just write (set OUT IN) and hope for the best. */
else
emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
emit_insn (gen_rtx (SET, VOIDmode, out, in));
/* Return the first insn emitted.
We can not just return get_last_insn, because there may have
@ -6960,7 +6932,7 @@ inc_for_reload (reloadreg, value, inc_amount)
emit_insn (gen_move_insn (reloadreg, incloc));
/* See if we can directly increment INCLOC. Use a method similar to that
in gen_input_reload. */
in gen_reload. */
last = get_last_insn ();
add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,

View File

@ -607,6 +607,22 @@ mark_set_resources (x, res, in_dest, include_delayed_effects)
mark_set_resources (XEXP (x, 0), res, 0, 0);
return;
case SUBREG:
if (in_dest)
{
if (GET_CODE (SUBREG_REG (x)) != REG)
mark_set_resources (SUBREG_REG (x), res,
in_dest, include_delayed_effects);
else
{
int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
for (i = regno; i < last_regno; i++)
SET_HARD_REG_BIT (res->regs, i);
}
}
return;
case REG:
if (in_dest)
for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
@ -1762,6 +1778,7 @@ try_merge_delay_insns (insn, thread)
for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
{
rtx pat = PATTERN (trial);
rtx oldtrial = trial;
next_trial = next_nonnote_insn (trial);
@ -1781,6 +1798,8 @@ try_merge_delay_insns (insn, thread)
&& (trial = try_split (pat, trial, 0)) != 0
/* Update next_trial, in case try_split succeeded. */
&& (next_trial = next_nonnote_insn (trial))
/* Likewise THREAD. */
&& (thread = oldtrial == thread ? trial : thread)
&& rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
/* Have to test this condition if annul condition is different
from (and less restrictive than) non-annulling one. */
@ -1790,6 +1809,9 @@ try_merge_delay_insns (insn, thread)
if (! annul_p)
{
update_block (trial, thread);
if (trial == thread)
thread = next_active_insn (thread);
delete_insn (trial);
INSN_FROM_TARGET_P (next_to_match) = 0;
}
@ -3265,6 +3287,13 @@ fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
if (own_thread)
{
update_block (trial, thread);
if (trial == thread)
{
thread = next_active_insn (thread);
if (new_thread == trial)
new_thread = thread;
}
delete_insn (trial);
}
else
@ -3284,6 +3313,8 @@ fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
trial = try_split (pat, trial, 0);
if (new_thread == old_trial)
new_thread = trial;
if (thread == old_trial)
thread = trial;
pat = PATTERN (trial);
if (eligible_for_delay (insn, *pslots_filled, trial, flags))
goto winner;

View File

@ -1603,6 +1603,10 @@ may_trap_p (x)
we can link this file into other programs. */
if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
return 1;
case EXPR_LIST:
/* An EXPR_LIST is used to represent a function call. This
certainly may trap. */
return 1;
default:
/* Any floating arithmetic may trap. */
if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)

View File

@ -315,7 +315,7 @@ static void add_insn_mem_dependence PROTO((rtx *, rtx *, rtx, rtx));
static void flush_pending_lists PROTO((rtx));
static void sched_analyze_1 PROTO((rtx, rtx));
static void sched_analyze_2 PROTO((rtx, rtx));
static void sched_analyze_insn PROTO((rtx, rtx));
static void sched_analyze_insn PROTO((rtx, rtx, rtx));
static int sched_analyze PROTO((rtx, rtx));
static void sched_note_set PROTO((int, rtx, int));
static int rank_for_schedule PROTO((rtx *, rtx *));
@ -2013,8 +2013,9 @@ sched_analyze_2 (x, insn)
/* Analyze an INSN with pattern X to find all dependencies. */
static void
sched_analyze_insn (x, insn)
sched_analyze_insn (x, insn, loop_notes)
rtx x, insn;
rtx loop_notes;
{
register RTX_CODE code = GET_CODE (x);
rtx link;
@ -2048,6 +2049,36 @@ sched_analyze_insn (x, insn)
sched_analyze_2 (XEXP (link, 0), insn);
}
/* If there is a LOOP_{BEG,END} note in the middle of a basic block, then
we must be sure that no instructions are scheduled across it.
Otherwise, the reg_n_refs info (which depends on loop_depth) would
become incorrect. */
if (loop_notes)
{
int max_reg = max_reg_num ();
rtx link;
for (i = 0; i < max_reg; i++)
{
rtx u;
for (u = reg_last_uses[i]; u; u = XEXP (u, 1))
add_dependence (insn, XEXP (u, 0), REG_DEP_ANTI);
reg_last_uses[i] = 0;
if (reg_last_sets[i])
add_dependence (insn, reg_last_sets[i], 0);
}
reg_pending_sets_all = 1;
flush_pending_lists (insn);
link = loop_notes;
while (XEXP (link, 1))
link = XEXP (link, 1);
XEXP (link, 1) = REG_NOTES (insn);
REG_NOTES (insn) = loop_notes;
}
/* After reload, it is possible for an instruction to have a REG_DEAD note
for a register that actually dies a few instructions earlier. For
example, this can happen with SECONDARY_MEMORY_NEEDED reloads.
@ -2107,7 +2138,8 @@ sched_analyze_insn (x, insn)
prev_dep_insn = insn;
dep_insn = PREV_INSN (insn);
while (GET_CODE (dep_insn) == INSN
&& GET_CODE (PATTERN (dep_insn)) == USE)
&& GET_CODE (PATTERN (dep_insn)) == USE
&& GET_CODE (XEXP (PATTERN (dep_insn), 0)) == REG)
{
SCHED_GROUP_P (prev_dep_insn) = 1;
@ -2135,6 +2167,7 @@ sched_analyze (head, tail)
register int n_insns = 0;
register rtx u;
register int luid = 0;
rtx loop_notes = 0;
for (insn = head; ; insn = NEXT_INSN (insn))
{
@ -2142,7 +2175,8 @@ sched_analyze (head, tail)
if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
{
sched_analyze_insn (PATTERN (insn), insn);
sched_analyze_insn (PATTERN (insn), insn, loop_notes);
loop_notes = 0;
n_insns += 1;
}
else if (GET_CODE (insn) == CALL_INSN)
@ -2179,7 +2213,8 @@ sched_analyze (head, tail)
/* Add a fake REG_NOTE which we will later convert
back into a NOTE_INSN_SETJMP note. */
REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_DEAD, constm1_rtx,
REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_DEAD,
GEN_INT (NOTE_INSN_SETJMP),
REG_NOTES (insn));
}
else
@ -2207,7 +2242,8 @@ sched_analyze (head, tail)
}
LOG_LINKS (sched_before_next_call) = 0;
sched_analyze_insn (PATTERN (insn), insn);
sched_analyze_insn (PATTERN (insn), insn, loop_notes);
loop_notes = 0;
/* We don't need to flush memory for a function call which does
not involve memory. */
@ -2224,6 +2260,11 @@ sched_analyze (head, tail)
last_function_call = insn;
n_insns += 1;
}
else if (GET_CODE (insn) == NOTE
&& (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END))
loop_notes = gen_rtx (EXPR_LIST, REG_DEAD,
GEN_INT (NOTE_LINE_NUMBER (insn)), loop_notes);
if (insn == tail)
return n_insns;
@ -2825,6 +2866,16 @@ attach_deaths (x, insn, set_p)
{
if (! all_needed && ! dead_or_set_p (insn, x))
{
/* Check for the case where the register dying partially
overlaps the register set by this insn. */
if (regno < FIRST_PSEUDO_REGISTER
&& HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
{
int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
while (--n >= 0)
some_needed |= dead_or_set_regno_p (insn, regno + n);
}
/* If none of the words in X is needed, make a REG_DEAD
note. Otherwise, we must make partial REG_DEAD
notes. */
@ -2985,8 +3036,11 @@ unlink_notes (insn, tail)
/* Don't save away NOTE_INSN_SETJMPs, because they must remain
immediately after the call they follow. We use a fake
(REG_DEAD (const_int -1)) note to remember them. */
else if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_SETJMP)
(REG_DEAD (const_int -1)) note to remember them.
Likewise with NOTE_INSN_LOOP_BEG and NOTE_INSN_LOOP_END. */
else if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_SETJMP
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_END)
{
/* Insert the note at the end of the notes list. */
PREV_INSN (insn) = note_list;
@ -3803,17 +3857,23 @@ schedule_block (b, file)
PREV_INSN (last) = insn;
last = insn;
/* Check to see if we need to re-emit a NOTE_INSN_SETJMP here. */
if (GET_CODE (insn) == CALL_INSN)
{
rtx note = find_reg_note (insn, REG_DEAD, constm1_rtx);
/* Check to see if we need to re-emit any notes here. */
{
rtx note;
if (note)
{
emit_note_after (NOTE_INSN_SETJMP, insn);
remove_note (insn, note);
}
}
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
{
if (REG_NOTE_KIND (note) == REG_DEAD
&& GET_CODE (XEXP (note, 0)) == CONST_INT)
{
if (INTVAL (XEXP (note, 0)) == NOTE_INSN_SETJMP)
emit_note_after (INTVAL (XEXP (note, 0)), insn);
else
last = emit_note_before (INTVAL (XEXP (note, 0)), last);
remove_note (insn, note);
}
}
}
/* Everything that precedes INSN now either becomes "ready", if
it can execute immediately before INSN, or "pending", if

View File

@ -53,7 +53,7 @@ AT&T C compiler. From the example below I would conclude the following:
/* Mips systems use the SDB functions to dump out symbols, but
do not supply usable syms.h include files. */
#if defined(USG) && !defined(MIPS) && !defined (hpux)
#if defined(USG) && !defined(MIPS) && !defined (hpux) && !defined(WINNT)
#include <syms.h>
/* Use T_INT if we don't have T_VOID. */
#ifndef T_VOID

View File

@ -1380,6 +1380,8 @@ expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
i = decode_reg_name (regname);
if (i >= 0 || i == -4)
++nclobbers;
else if (i == -2)
error ("unknown register name `%s' in `asm'", regname);
}
last_expr_type = 0;
@ -1569,8 +1571,7 @@ expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
continue;
}
error ("unknown register name `%s' in `asm'", regname);
return;
/* Ignore unknown register, error already signalled. */
}
/* Use QImode since that's guaranteed to clobber just one reg. */
@ -1730,7 +1731,7 @@ warn_if_unused_value (exp)
while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
tem = TREE_OPERAND (tem, 0);
if (TREE_CODE (tem) == MODIFY_EXPR)
if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR)
return 0;
}
/* ... fall through ... */
@ -1932,6 +1933,17 @@ expand_start_else ()
cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
}
/* After calling expand_start_else, turn this "else" into an "else if"
by providing another condition. */
void
expand_elseif (cond)
tree cond;
{
cond_stack->data.cond.next_label = gen_label_rtx ();
do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL for the end of an if-then.
Pop the record for it off of cond_stack. */
@ -2501,7 +2513,15 @@ expand_return (retval)
}
/* Are any cleanups needed? E.g. C++ destructors to be run? */
/* This is not sufficient. We also need to watch for cleanups of the
expression we are about to expand. Unfortunately, we cannot know
if it has cleanups until we expand it, and we want to change how we
expand it depending upon if we need cleanups. We can't win. */
#if 0
cleanups = any_pending_cleanups (1);
#else
cleanups = 1;
#endif
if (TREE_CODE (retval) == RESULT_DECL)
retval_rhs = retval;
@ -3510,6 +3530,17 @@ expand_anon_union_decl (decl, cleanup, decl_elts)
tree cleanup_elt = TREE_PURPOSE (decl_elts);
enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
/* Propagate the union's alignment to the elements. */
DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
/* If the element has BLKmode and the union doesn't, the union is
aligned such that the element doesn't need to have BLKmode, so
change the element's mode to the appropriate one for its size. */
if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
DECL_MODE (decl_elt) = mode
= mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
MODE_INT, 1);
/* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
instead create a new MEM rtx with the proper mode. */
if (GET_CODE (x) == MEM)
@ -4055,6 +4086,191 @@ bc_pushcase (value, label)
return 0;
}
/* Returns the number of possible values of TYPE.
Returns -1 if the number is unknown or variable.
Returns -2 if the number does not fit in a HOST_WIDE_INT.
Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
do not increase monotonically (there may be duplicates);
to 1 if the values increase monotonically, but not always by 1;
otherwise sets it to 0. */
HOST_WIDE_INT
all_cases_count (type, spareness)
     tree type;
     int *spareness;
{
  HOST_WIDE_INT count;

  *spareness = 0;

  switch (TREE_CODE (type))
    {
      tree t;

    case BOOLEAN_TYPE:
      count = 2;
      break;

    case CHAR_TYPE:
      count = 1 << BITS_PER_UNIT;
      break;

    default:
    case INTEGER_TYPE:
      /* Both bounds must be compile-time integer constants, else the
	 number of values is unknown or variable.  (Note: the second
	 test must be on TYPE_MAX_VALUE, not TYPE_MIN_VALUE again.)  */
      if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
	return -1;
      else
	{
	  /* count
	     = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
	     - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
	     but with overflow checking, done in double-precision
	     (two-HOST_WIDE_INT) arithmetic.  */
	  tree mint = TYPE_MIN_VALUE (type);
	  tree maxt = TYPE_MAX_VALUE (type);
	  HOST_WIDE_INT lo, hi;

	  neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
		      &lo, &hi);
	  add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
		      lo, hi, &lo, &hi);
	  add_double (lo, hi, 1, 0, &lo, &hi);
	  /* If the count does not fit in a non-negative HOST_WIDE_INT,
	     report that distinctly.  */
	  if (hi != 0 || lo < 0)
	    return -2;
	  count = lo;
	}
      break;

    case ENUMERAL_TYPE:
      /* Count the enumerators, noting whether they form a dense run
	 starting at TYPE_MIN_VALUE.  */
      count = 0;
      for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
	{
	  if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
	      || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
	      || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
		 != TREE_INT_CST_LOW (TREE_VALUE (t)))
	    *spareness = 1;
	  count++;
	}
      /* If not dense, check whether the values at least increase
	 monotonically; duplicates or decreases mean sparseness 2.  */
      if (*spareness == 1)
	{
	  tree prev = TREE_VALUE (TYPE_VALUES (type));
	  for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
	    {
	      if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
		{
		  *spareness = 2;
		  break;
		}
	      prev = TREE_VALUE (t);
	    }
	}
    }
  return count;
}
#define BITARRAY_TEST(ARRAY, INDEX) \
((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
& (1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR)))
#define BITARRAY_SET(ARRAY, INDEX) \
((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
|= 1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR))
/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
with the case values we have seen, assuming the case expression
has the given TYPE.
SPARSENESS is as determined by all_cases_count.
The time needed is proportional to COUNT, unless
SPARSENESS is 2, in which case quadratic time is needed. */
void
mark_seen_cases (type, cases_seen, count, sparseness)
     tree type;
     unsigned char *cases_seen;
     long count;
     int sparseness;
{
  long i;
  /* For SPARSENESS == 1: where the previous search through TYPE_VALUES
     ended, so each lookup can resume there instead of restarting.  */
  tree next_node_to_try = NULL_TREE;
  long next_node_offset = 0;

  register struct case_node *n;
  /* Scratch INTEGER_CST reused as the running case value.  */
  tree val = make_node (INTEGER_CST);
  TREE_TYPE (val) = type;
  /* Walk every case label of the innermost switch; each node covers
     the inclusive range [n->low, n->high].  */
  for (n = case_stack->data.case_stmt.case_list; n;
       n = n->right)
    {
      TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
      TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
      /* Step VAL from n->low up through n->high inclusive.  */
      while ( ! tree_int_cst_lt (n->high, val))
	{
	  /* Calculate (into xlo) the "offset" of the integer (val).
	     The element with lowest value has offset 0, the next smallest
	     element has offset 1, etc.  */
	  HOST_WIDE_INT xlo, xhi;
	  tree t;
	  if (sparseness == 2)
	    {
	      /* This less efficient loop is only needed to handle
		 duplicate case values (multiple enum constants
		 with the same value).  */
	      for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
		   t = TREE_CHAIN (t), xlo++)
		{
		  if (tree_int_cst_equal (val, TREE_VALUE (t)))
		    BITARRAY_SET (cases_seen, xlo);
		}
	    }
	  else
	    {
	      if (sparseness && TYPE_VALUES (type) != NULL_TREE)
		{
		  /* The TYPE_VALUES will be in increasing order, so
		     start searching where we last ended.  */
		  t = next_node_to_try;
		  xlo = next_node_offset;
		  xhi = 0;
		  for (;;)
		    {
		      /* Ran off the end of the list: wrap around.  */
		      if (t == NULL_TREE)
			{
			  t = TYPE_VALUES (type);
			  xlo = 0;
			}
		      if (tree_int_cst_equal (val, TREE_VALUE (t)))
			{
			  /* Found it; remember where to resume next time.  */
			  next_node_to_try = TREE_CHAIN (t);
			  next_node_offset = xlo + 1;
			  break;
			}
		      xlo++;
		      t = TREE_CHAIN (t);
		      /* Came full circle: VAL is not among TYPE_VALUES;
			 leave XLO past the end so nothing is marked.  */
		      if (t == next_node_to_try)
			break;
		    }
		}
	      else
		{
		  /* Dense case: the offset is VAL - TYPE_MIN_VALUE,
		     computed in double-precision arithmetic.  */
		  t = TYPE_MIN_VALUE (type);
		  if (t)
		    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
				&xlo, &xhi);
		  else
		    xlo = xhi = 0;
		  add_double (xlo, xhi,
			      TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
			      &xlo, &xhi);
		}
	      /* Record the offset, if it lies within the bitstring.  */
	      if (xhi == 0 && xlo >= 0 && xlo < count)
		BITARRAY_SET (cases_seen, xlo);
	    }
	  /* VAL += 1, in double precision.  */
	  add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
		      1, 0,
		      &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
	}
    }
}
/* Called when the index of a switch statement is an enumerated type
and there is no default label.
@ -4075,37 +4291,55 @@ check_for_full_enumeration_handling (type)
register tree chain;
int all_values = 1;
/* True iff the selector type is a numbered set mode. */
int sparseness = 0;
/* The number of possible selector values. */
HOST_WIDE_INT size;
/* For each possible selector value, a one iff it has been matched
by a case value alternative. */
unsigned char *cases_seen;
/* The allocated size of cases_seen, in chars. */
long bytes_needed;
tree t;
if (output_bytecode)
{
bc_check_for_full_enumeration_handling (type);
return;
}
/* The time complexity of this loop is currently O(N * M), with
N being the number of members in the enumerated type, and
M being the number of case expressions in the switch. */
if (! warn_switch)
return;
for (chain = TYPE_VALUES (type);
chain;
chain = TREE_CHAIN (chain))
size = all_cases_count (type, &sparseness);
bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
if (size > 0 && size < 600000
/* We deliberately use malloc here - not xmalloc. */
&& (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
{
/* Find a match between enumeral and case expression, if possible.
Quit looking when we've gone too far (since case expressions
are kept sorted in ascending order). Warn about enumerators not
handled in the switch statement case expression list. */
long i;
tree v = TYPE_VALUES (type);
bzero (cases_seen, bytes_needed);
for (n = case_stack->data.case_stmt.case_list;
n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
n = n->right)
;
/* The time complexity of this code is normally O(N), where
N being the number of members in the enumerated type.
However, if type is a ENUMERAL_TYPE whose values do not
increase monotonically, quadratic time may be needed. */
if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
mark_seen_cases (type, cases_seen, size, sparseness);
for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
{
if (warn_switch)
if (BITARRAY_TEST(cases_seen, i) == 0)
warning ("enumeration value `%s' not handled in switch",
IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
all_values = 0;
IDENTIFIER_POINTER (TREE_PURPOSE (v)));
}
free (cases_seen);
}
/* Now we go the other way around; we warn if there are case

View File

@ -44,6 +44,10 @@ tree size_one_node;
The value is measured in bits. */
int maximum_field_alignment;
/* If non-zero, the alignment of a bitsting or (power-)set value, in bits.
May be overridden by front-ends. */
int set_alignment = 0;
#define GET_MODE_ALIGNMENT(MODE) \
MIN (BIGGEST_ALIGNMENT, \
MAX (1, (GET_MODE_UNIT_SIZE (MODE) * BITS_PER_UNIT)))
@ -84,8 +88,8 @@ variable_size (size)
{
/* If the language-processor is to take responsibility for variable-sized
items (e.g., languages which have elaboration procedures like Ada),
just return SIZE unchanged. */
if (global_bindings_p () < 0)
just return SIZE unchanged. Likewise for self-referential sizes. */
if (global_bindings_p () < 0 || contains_placeholder_p (size))
return size;
size = save_expr (size);
@ -898,6 +902,31 @@ layout_type (type)
TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
break;
case SET_TYPE:
if (TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST
|| TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
abort();
else
{
#ifndef SET_WORD_SIZE
#define SET_WORD_SIZE BITS_PER_WORD
#endif
int alignment = set_alignment ? set_alignment : SET_WORD_SIZE;
int size_in_bits =
TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
- TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1;
int rounded_size
= ((size_in_bits + alignment - 1) / alignment) * alignment;
if (rounded_size > alignment)
TYPE_MODE (type) = BLKmode;
else
TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);
TYPE_SIZE (type) = size_int (rounded_size);
TYPE_ALIGN (type) = alignment;
TYPE_PRECISION (type) = size_in_bits;
}
break;
case FILE_TYPE:
/* The size may vary in different languages, so the language front end
should fill in the size. */

View File

@ -82,6 +82,11 @@ static int *reg_order;
static char *regs_live;
/* Indexed by reg number, nonzero if reg was used in a SUBREG that changes
its size. */
static char *regs_change_size;
/* Indexed by insn's suid, the set of hard regs live after that insn. */
static HARD_REG_SET *after_insn_hard_regs;
@ -93,7 +98,7 @@ static HARD_REG_SET *after_insn_hard_regs;
static int stupid_reg_compare PROTO((int *, int *));
static int stupid_find_reg PROTO((int, enum reg_class, enum machine_mode,
int, int));
int, int, int));
static void stupid_mark_refs PROTO((rtx, rtx));
/* Stupid life analysis is for the case where only variables declared
@ -157,6 +162,9 @@ stupid_life_analysis (f, nregs, file)
reg_order = (int *) alloca (nregs * sizeof (int));
bzero ((char *) reg_order, nregs * sizeof (int));
regs_change_size = (char *) alloca (nregs * sizeof (char));
bzero ((char *) regs_change_size, nregs * sizeof (char));
reg_renumber = (short *) oballoc (nregs * sizeof (short));
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
reg_renumber[i] = i;
@ -254,7 +262,8 @@ stupid_life_analysis (f, nregs, file)
reg_preferred_class (r),
PSEUDO_REGNO_MODE (r),
reg_where_born[r],
reg_where_dead[r]);
reg_where_dead[r],
regs_change_size[r]);
/* If no reg available in that class, try alternate class. */
if (reg_renumber[r] == -1 && reg_alternate_class (r) != NO_REGS)
@ -262,7 +271,8 @@ stupid_life_analysis (f, nregs, file)
reg_alternate_class (r),
PSEUDO_REGNO_MODE (r),
reg_where_born[r],
reg_where_dead[r]);
reg_where_dead[r],
regs_change_size[r]);
}
if (file)
@ -303,14 +313,19 @@ stupid_reg_compare (r1p, r2p)
Return -1 if such a block cannot be found.
If CALL_PRESERVED is nonzero, insist on registers preserved
over subroutine calls, and return -1 if cannot find such. */
over subroutine calls, and return -1 if cannot find such.
If CHANGES_SIZE is nonzero, it means this register was used as the
operand of a SUBREG that changes its size. */
static int
stupid_find_reg (call_preserved, class, mode, born_insn, dead_insn)
stupid_find_reg (call_preserved, class, mode,
born_insn, dead_insn, changes_size)
int call_preserved;
enum reg_class class;
enum machine_mode mode;
int born_insn, dead_insn;
int changes_size;
{
register int i, ins;
#ifdef HARD_REG_SET
@ -339,6 +354,12 @@ stupid_find_reg (call_preserved, class, mode, born_insn, dead_insn)
IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);
#ifdef CLASS_CANNOT_CHANGE_SIZE
if (changes_size)
IOR_HARD_REG_SET (used,
reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE]);
#endif
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
{
#ifdef REG_ALLOC_ORDER
@ -471,9 +492,18 @@ stupid_mark_refs (x, insn)
return;
}
else if (code == SUBREG
&& GET_CODE (SUBREG_REG (x)) == REG
&& REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
&& (GET_MODE_SIZE (GET_MODE (x))
!= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
&& (INTEGRAL_MODE_P (GET_MODE (x))
|| INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (x)))))
regs_change_size[REGNO (SUBREG_REG (x))] = 1;
/* Register value being used, not set. */
if (code == REG)
else if (code == REG)
{
regno = REGNO (x);
if (regno < FIRST_PSEUDO_REGISTER)

View File

@ -36,6 +36,7 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
#include <ctype.h>
#include <sys/stat.h>
#ifndef WINNT
#ifdef USG
#undef FLOAT
#include <sys/param.h>
@ -50,6 +51,7 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
#include <sys/resource.h>
#endif
#endif
#endif
#include "input.h"
#include "tree.h"
@ -430,11 +432,7 @@ int flag_inline_functions;
int flag_keep_inline_functions;
/* Nonzero means that functions declared `inline' will be treated
as `static'. Prevents generation of zillions of copies of unused
static inline functions; instead, `inlines' are written out
only when actually used. Used in conjunction with -g. Also
does the right thing with #pragma interface. */
/* Nonzero means that functions will not be inlined. */
int flag_no_inline;
@ -640,6 +638,8 @@ char *lang_options[] =
"-+e0", /* gcc.c tacks the `-' on the front. */
"-+e1",
"-+e2",
"-faccess-control",
"-fno-access-control",
"-fall-virtual",
"-fno-all-virtual",
"-falt-external-templates",
@ -707,6 +707,10 @@ char *lang_options[] =
"-Wno-non-virtual-dtor",
"-Wextern-inline",
"-Wno-extern-inline",
"-Wreorder",
"-Wno-reorder",
"-Wsynth",
"-Wno-synth",
/* these are for obj c */
"-lang-objc",
@ -849,6 +853,9 @@ int dump_time;
int
get_run_time ()
{
#ifdef WINNT
return 0;
#else
#ifdef USG
struct tms tms;
#else
@ -881,6 +888,7 @@ get_run_time ()
return (vms_times.proc_user_time + vms_times.proc_system_time) * 10000;
#endif
#endif
#endif
}
#define TIMEVAR(VAR, BODY) \
@ -942,20 +950,17 @@ fatal_io_error (name)
exit (35);
}
/* Called to give a better error message when we don't have an insn to match
what we are looking for or if the insn's constraints aren't satisfied,
rather than just calling abort(). */
/* Called to give a better error message for a bad insn rather than
just calling abort(). */
void
fatal_insn_not_found (insn)
fatal_insn (message, insn)
char *message;
rtx insn;
{
if (!output_bytecode)
{
if (INSN_CODE (insn) < 0)
error ("internal error--unrecognizable insn:");
else
error ("internal error--insn does not satisfy its constraints:");
error (message);
debug_rtx (insn);
}
if (asm_out_file)
@ -993,6 +998,20 @@ fatal_insn_not_found (insn)
abort ();
}
/* Called to give a better error message when we don't have an insn to match
what we are looking for or if the insn's constraints aren't satisfied,
rather than just calling abort(). */
void
fatal_insn_not_found (insn)
     rtx insn;
{
  /* A negative INSN_CODE means recognition failed outright; otherwise
     the insn was recognized but its constraints are not satisfied.
     Either way, hand the appropriate message to fatal_insn.  */
  char *message = (INSN_CODE (insn) < 0
		   ? "internal error--unrecognizable insn:"
		   : "internal error--insn does not satisfy its constraints:");

  fatal_insn (message, insn);
}
/* This is the default decl_printable_name function. */
static char *
@ -2719,6 +2738,7 @@ rest_of_compilation (decl)
if (warn_inline && specd)
warning_with_decl (decl, lose);
DECL_INLINE (decl) = 0;
DECL_ABSTRACT_ORIGIN (decl) = 0;
/* Don't really compile an extern inline function.
If we can't make it inline, pretend
it was only declared. */
@ -3927,8 +3947,7 @@ You Lose! You must define PREFERRED_DEBUGGING_TYPE!
compile_file (filename);
#ifndef OS2
#ifndef VMS
#if !defined(OS2) && !defined(VMS) && !defined(WINNT)
if (flag_print_mem)
{
#ifdef __alpha
@ -3946,8 +3965,7 @@ You Lose! You must define PREFERRED_DEBUGGING_TYPE!
system ("ps v");
#endif /* not USG */
}
#endif /* not VMS */
#endif /* not OS2 */
#endif /* not OS2 and not VMS and not WINNT */
if (errorcount)
exit (FATAL_EXIT_CODE);

View File

@ -319,8 +319,9 @@ gcc_obstack_init (obstack)
This is used before starting a nested function. */
void
save_tree_status (p)
save_tree_status (p, toplevel)
struct function *p;
int toplevel;
{
p->all_types_permanent = all_types_permanent;
p->momentary_stack = momentary_stack;
@ -334,10 +335,15 @@ save_tree_status (p)
p->saveable_obstack = saveable_obstack;
p->rtl_obstack = rtl_obstack;
/* Objects that need to be saved in this function can be in the nonsaved
obstack of the enclosing function since they can't possibly be needed
once it has returned. */
function_maybepermanent_obstack = function_obstack;
if (! toplevel)
{
/* Objects that need to be saved in this function can be in the nonsaved
obstack of the enclosing function since they can't possibly be needed
once it has returned. */
function_maybepermanent_obstack = function_obstack;
maybepermanent_firstobj
= (char *) obstack_finish (function_maybepermanent_obstack);
}
function_obstack = (struct obstack *) xmalloc (sizeof (struct obstack));
gcc_obstack_init (function_obstack);
@ -348,30 +354,32 @@ save_tree_status (p)
momentary_firstobj = (char *) obstack_finish (&momentary_obstack);
momentary_function_firstobj = momentary_firstobj;
maybepermanent_firstobj
= (char *) obstack_finish (function_maybepermanent_obstack);
}
/* Restore all variables describing the current status from the structure *P.
This is used after a nested function. */
void
restore_tree_status (p)
restore_tree_status (p, toplevel)
struct function *p;
int toplevel;
{
all_types_permanent = p->all_types_permanent;
momentary_stack = p->momentary_stack;
obstack_free (&momentary_obstack, momentary_function_firstobj);
/* Free saveable storage used by the function just compiled and not
saved.
if (! toplevel)
{
/* Free saveable storage used by the function just compiled and not
saved.
CAUTION: This is in function_obstack of the containing function. So
we must be sure that we never allocate from that obstack during
the compilation of a nested function if we expect it to survive past the
nested function's end. */
obstack_free (function_maybepermanent_obstack, maybepermanent_firstobj);
CAUTION: This is in function_obstack of the containing function.
So we must be sure that we never allocate from that obstack during
the compilation of a nested function if we expect it to survive
past the nested function's end. */
obstack_free (function_maybepermanent_obstack, maybepermanent_firstobj);
}
obstack_free (function_obstack, 0);
free (function_obstack);
@ -519,7 +527,10 @@ permanent_allocation (function_end)
/* Free up previous temporary obstack data */
obstack_free (&temporary_obstack, temporary_firstobj);
if (function_end)
obstack_free (&momentary_obstack, momentary_function_firstobj);
{
obstack_free (&momentary_obstack, momentary_function_firstobj);
momentary_firstobj = momentary_function_firstobj;
}
else
obstack_free (&momentary_obstack, momentary_firstobj);
obstack_free (&maybepermanent_obstack, maybepermanent_firstobj);
@ -2353,6 +2364,13 @@ stabilize_reference (ref)
stabilize_reference_1 (TREE_OPERAND (ref, 1)));
break;
case COMPOUND_EXPR:
result = build_nt (COMPOUND_EXPR,
stabilize_reference_1 (TREE_OPERAND (ref, 0)),
stabilize_reference (TREE_OPERAND (ref, 1)));
break;
/* If arg isn't a kind of lvalue we recognize, make no change.
Caller should recognize the error for an invalid lvalue. */
default:
@ -2792,19 +2810,14 @@ build_type_variant (type, constp, volatilep)
constp = !!constp;
volatilep = !!volatilep;
/* If not generating auxiliary info, search the chain of variants to see
if there is already one there just like the one we need to have. If so,
use that existing one.
/* Search the chain of variants to see if there is already one there just
like the one we need to have. If so, use that existing one. We must
preserve the TYPE_NAME, since there is code that depends on this. */
We don't do this in the case where we are generating aux info because
in that case we want each typedef names to get it's own distinct type
node, even if the type of this new typedef is the same as some other
(existing) type. */
if (!flag_gen_aux_info)
for (t = TYPE_MAIN_VARIANT(type); t; t = TYPE_NEXT_VARIANT (t))
if (constp == TYPE_READONLY (t) && volatilep == TYPE_VOLATILE (t))
return t;
for (t = TYPE_MAIN_VARIANT(type); t; t = TYPE_NEXT_VARIANT (t))
if (constp == TYPE_READONLY (t) && volatilep == TYPE_VOLATILE (t)
&& TYPE_NAME (t) == TYPE_NAME (type))
return t;
/* We need a new one. */
@ -2819,7 +2832,7 @@ build_type_variant (type, constp, volatilep)
This is the right thing to do only when something else
about TYPE is modified in place. */
tree
void
change_main_variant (type, new_main)
tree type, new_main;
{
@ -3054,7 +3067,9 @@ type_list_equal (l1, l2)
int cmp = simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
if (cmp < 0)
abort ();
if (cmp == 0)
if (cmp == 0
|| TREE_TYPE (TREE_PURPOSE (t1))
!= TREE_TYPE (TREE_PURPOSE (t2)))
return 0;
}
}
@ -3843,7 +3858,7 @@ decl_function_context (decl)
{
if (TREE_CODE (context) == RECORD_TYPE
|| TREE_CODE (context) == UNION_TYPE)
context = TYPE_CONTEXT (context);
context = NULL_TREE;
else if (TREE_CODE (context) == TYPE_DECL)
context = DECL_CONTEXT (context);
else if (TREE_CODE (context) == BLOCK)
@ -3994,3 +4009,102 @@ get_file_function_name (kind)
return get_identifier (buf);
}
/* Expand (the constant part of) a SET_TYPE CONTRUCTOR node.
The result is placed in BUFFER (which has length BIT_SIZE),
with one bit in each char ('\000' or '\001').
If the constructor is constant, NULL_TREE is returned.
Otherwise, a TREE_LIST of the non-constant elements is emitted. */
tree
get_set_constructor_bits (init, buffer, bit_size)
tree init;
char *buffer;
int bit_size;
{
int i;
tree vals;
HOST_WIDE_INT domain_min
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (init))));
tree non_const_bits = NULL_TREE;
for (i = 0; i < bit_size; i++)
buffer[i] = 0;
for (vals = TREE_OPERAND (init, 1);
vals != NULL_TREE; vals = TREE_CHAIN (vals))
{
if (TREE_CODE (TREE_VALUE (vals)) != INTEGER_CST
|| (TREE_PURPOSE (vals) != NULL_TREE
&& TREE_CODE (TREE_PURPOSE (vals)) != INTEGER_CST))
non_const_bits =
tree_cons (TREE_PURPOSE (vals), TREE_VALUE (vals), non_const_bits);
else if (TREE_PURPOSE (vals) != NULL_TREE)
{
/* Set a range of bits to ones. */
HOST_WIDE_INT lo_index
= TREE_INT_CST_LOW (TREE_PURPOSE (vals)) - domain_min;
HOST_WIDE_INT hi_index
= TREE_INT_CST_LOW (TREE_VALUE (vals)) - domain_min;
if (lo_index < 0 || lo_index >= bit_size
|| hi_index < 0 || hi_index >= bit_size)
abort ();
for ( ; lo_index <= hi_index; lo_index++)
buffer[lo_index] = 1;
}
else
{
/* Set a single bit to one. */
HOST_WIDE_INT index
= TREE_INT_CST_LOW (TREE_VALUE (vals)) - domain_min;
if (index < 0 || index >= bit_size)
{
error ("invalid initializer for bit string");
return NULL_TREE;
}
buffer[index] = 1;
}
}
return non_const_bits;
}
/* Expand (the constant part of) a SET_TYPE CONTRUCTOR node.
The result is placed in BUFFER (which is an array of WD_SIZE
words). TYPE_ALIGN bits are stored in each element of BUFFER.
If the constructor is constant, NULL_TREE is returned.
Otherwise, a TREE_LIST of the non-constant elements is emitted. */
tree
get_set_constructor_words (init, buffer, wd_size)
tree init;
HOST_WIDE_INT *buffer;
int wd_size;
{
int i;
tree vals = TREE_OPERAND (init, 1);
int set_word_size = TYPE_ALIGN (TREE_TYPE (init));
int bit_size = wd_size * set_word_size;
int bit_pos = 0;
HOST_WIDE_INT *wordp = buffer;
char *bit_buffer = (char*)alloca(bit_size);
tree non_const_bits = get_set_constructor_bits (init, bit_buffer, bit_size);
for (i = 0; i < wd_size; i++)
buffer[i] = 0;
for (i = 0; i < bit_size; i++)
{
if (bit_buffer[i])
{
#if BITS_BIG_ENDIAN
*wordp |= (1 << (set_word_size - 1 - bit_pos));
#else
*wordp |= 1 << bit_pos;
#endif
}
bit_pos++;
if (bit_pos >= set_word_size)
bit_pos = 0, wordp++;
}
return non_const_bits;
}

View File

@ -1335,11 +1335,27 @@ calculate_giv_inc (pattern, src_insn, regno)
one of the LO_SUM rtx. */
if (GET_CODE (increment) == LO_SUM)
increment = XEXP (increment, 1);
else if (GET_CODE (increment) == IOR)
{
/* The rs6000 port loads some constants with IOR. */
rtx second_part = XEXP (increment, 1);
src_insn = PREV_INSN (src_insn);
increment = SET_SRC (PATTERN (src_insn));
/* Don't need the last insn anymore. */
delete_insn (get_last_insn ());
if (GET_CODE (second_part) != CONST_INT
|| GET_CODE (increment) != CONST_INT)
abort ();
increment = GEN_INT (INTVAL (increment) | INTVAL (second_part));
}
if (GET_CODE (increment) != CONST_INT)
abort ();
/* The insn loading the constant into a register is not longer needed,
/* The insn loading the constant into a register is no longer needed,
so delete it. */
delete_insn (get_last_insn ());
}
@ -1730,12 +1746,32 @@ copy_loop_body (copy_start, copy_end, map, exit_label, last_iteration,
case to be a branch past the end of the loop, and the
original jump label case to fall_through. */
if (! invert_exp (pattern, copy)
|| ! redirect_exp (&pattern,
map->label_map[CODE_LABEL_NUMBER
(JUMP_LABEL (insn))],
exit_label, copy))
abort ();
if (invert_exp (pattern, copy))
{
if (! redirect_exp (&pattern,
map->label_map[CODE_LABEL_NUMBER
(JUMP_LABEL (insn))],
exit_label, copy))
abort ();
}
else
{
rtx jmp;
rtx lab = gen_label_rtx ();
/* Can't do it by reversing the jump (probably becasue we
couln't reverse the conditions), so emit a new
jump_insn after COPY, and redirect the jump around
that. */
jmp = emit_jump_insn_after (gen_jump (exit_label), copy);
jmp = emit_barrier_after (jmp);
emit_label_after (lab, jmp);
LABEL_NUSES (lab) = 0;
if (! redirect_exp (&pattern,
map->label_map[CODE_LABEL_NUMBER
(JUMP_LABEL (insn))],
lab, copy))
abort ();
}
}
#ifdef HAVE_cc0
@ -3077,7 +3113,11 @@ loop_iterations (loop_start, loop_end)
loop_final_value = 0;
loop_iteration_var = 0;
last_loop_insn = prev_nonnote_insn (loop_end);
/* We used to use pren_nonnote_insn here, but that fails because it might
accidentally get the branch for a contained loop if the branch for this
loop was deleted. We can only trust branches immediately before the
loop_end. */
last_loop_insn = PREV_INSN (loop_end);
comparison = get_condition_for_loop (last_loop_insn);
if (comparison == 0)
@ -3115,28 +3155,6 @@ loop_iterations (loop_start, loop_end)
/* iteration_info already printed a message. */
return 0;
if (increment == 0)
{
if (loop_dump_stream)
fprintf (loop_dump_stream,
"Loop unrolling: Increment value can't be calculated.\n");
return 0;
}
if (GET_CODE (increment) != CONST_INT)
{
if (loop_dump_stream)
fprintf (loop_dump_stream,
"Loop unrolling: Increment value not constant.\n");
return 0;
}
if (GET_CODE (initial_value) != CONST_INT)
{
if (loop_dump_stream)
fprintf (loop_dump_stream,
"Loop unrolling: Initial value not constant.\n");
return 0;
}
/* If the comparison value is an invariant register, then try to find
its value from the insns before the start of the loop. */
@ -3185,7 +3203,28 @@ loop_iterations (loop_start, loop_end)
loop_increment = increment;
loop_final_value = final_value;
if (final_value == 0)
if (increment == 0)
{
if (loop_dump_stream)
fprintf (loop_dump_stream,
"Loop unrolling: Increment value can't be calculated.\n");
return 0;
}
else if (GET_CODE (increment) != CONST_INT)
{
if (loop_dump_stream)
fprintf (loop_dump_stream,
"Loop unrolling: Increment value not constant.\n");
return 0;
}
else if (GET_CODE (initial_value) != CONST_INT)
{
if (loop_dump_stream)
fprintf (loop_dump_stream,
"Loop unrolling: Initial value not constant.\n");
return 0;
}
else if (final_value == 0)
{
if (loop_dump_stream)
fprintf (loop_dump_stream,

View File

@ -1358,10 +1358,9 @@ contains_pointers_p (type)
}
}
/* Output text storage for constructor CONSTR. Returns rtx of
storage. */
/* Output text storage for constructor CONSTR. */
rtx
void
bc_output_constructor (constr)
tree constr;
{

View File

@ -1 +1 @@
char *version_string = "2.6.0";
char *version_string = "2.6.1";

View File

@ -93,7 +93,6 @@ typedef unsigned char U_CHAR;
/* VMS-specific definitions */
#ifdef VMS
#include <time.h>
#include <perror.h> /* This defines sys_errlist/sys_nerr properly */
#include <descrip.h>
#define O_RDONLY 0 /* Open arg for Read/Only */
#define O_WRONLY 1 /* Open arg for Write/Only */
@ -186,13 +185,22 @@ extern char *getenv ();
extern FILE *fdopen ();
extern char *version_string;
extern struct tm *localtime ();
#ifndef VMS
#ifndef HAVE_STRERROR
extern int sys_nerr;
#if defined(bsd4_4) || defined(__NetBSD__)
#if defined(bsd4_4) || defined(__NetBSD__) || defined(__FreeBSD__)
extern const char *const sys_errlist[];
#else
extern char *sys_errlist[];
#endif
#else /* HAVE_STERRROR */
char *strerror ();
#endif
#else /* VMS */
char *strerror (int,...);
#endif
extern int parse_escape ();
extern HOST_WIDE_INT parse_c_expression ();
#ifndef errno
extern int errno;
@ -276,7 +284,7 @@ static void write_output ();
static int check_macro_name ();
static int compare_defs ();
static int compare_token_lists ();
static int eval_if_expression ();
static HOST_WIDE_INT eval_if_expression ();
static int discard_comments ();
static int change_newlines ();
static int line_for_error ();
@ -388,6 +396,11 @@ static int print_include_names = 0;
static int no_line_commands;
/* Nonzero means output the text in failing conditionals,
inside #failed ... #endfailed. */
static int output_conditionals;
/* dump_only means inhibit output of the preprocessed text
and instead output the definitions of all user-defined
macros in a form suitable for use as input to cccp.
@ -1123,7 +1136,12 @@ main (argc, argv)
#endif
p = argv[0] + strlen (argv[0]);
while (p != argv[0] && p[-1] != '/') --p;
while (p != argv[0] && p[-1] != '/'
#ifdef DIR_SEPARATOR
&& p[-1] != DIR_SEPARATOR
#endif
)
--p;
progname = p;
#ifdef VMS
@ -1195,6 +1213,9 @@ main (argc, argv)
else
include_prefix = argv[++i];
}
if (!strcmp (argv[i], "-ifoutput")) {
output_conditionals = 1;
}
if (!strcmp (argv[i], "-isystem")) {
struct file_name_list *dirtmp;
@ -1322,7 +1343,10 @@ main (argc, argv)
pedantic = 1;
pedantic_errors = 1;
} else if (!strcmp (argv[i], "-pcp")) {
char *pcp_fname = argv[++i];
char *pcp_fname;
if (i + 1 == argc)
fatal ("Filename missing after -pcp option");
pcp_fname = argv[++i];
pcp_outfile =
((pcp_fname[0] != '-' || pcp_fname[1] != '\0')
? fopen (pcp_fname, "w")
@ -1429,6 +1453,8 @@ main (argc, argv)
/* For -MD and -MMD options, write deps on file named by next arg. */
if (!strcmp (argv[i], "-MD")
|| !strcmp (argv[i], "-MMD")) {
if (i + 1 == argc)
fatal ("Filename missing after %s option", argv[i]);
i++;
deps_file = argv[i];
deps_mode = "w";
@ -1606,7 +1632,11 @@ main (argc, argv)
/* Some people say that CPATH should replace the standard include dirs,
but that seems pointless: it comes before them, so it overrides them
anyway. */
#ifdef WINNT
p = (char *) getenv ("Include");
#else
p = (char *) getenv ("CPATH");
#endif
if (p != 0 && ! no_standard_includes)
path_include (p);
@ -1958,7 +1988,11 @@ main (argc, argv)
int len;
/* Discard all directory prefixes from filename. */
if ((q = rindex (in_fname, '/')) != NULL)
if ((q = rindex (in_fname, '/')) != NULL
#ifdef DIR_SEPARATOR
&& (q = rindex (in_fname, DIR_SEPARATOR)) != NULL
#endif
)
++q;
else
q = in_fname;
@ -2175,6 +2209,33 @@ path_include (path)
}
}
/* Return the address of the first character in S that equals C.
S is an array of length N, possibly containing '\0's, and followed by '\0'.
Return 0 if there is no such character. Assume that C itself is not '\0'.
If we knew we could use memchr, we could just invoke memchr (S, C, N),
but unfortunately memchr isn't autoconfigured yet. */
static U_CHAR *
index0 (s, c, n)
U_CHAR *s;
int c;
int n;
{
for (;;) {
char *q = index (s, c);
if (q)
return (U_CHAR *) q;
else {
int l = strlen (s);
if (l == n)
return 0;
l++;
s += l;
n -= l;
}
}
}
/* Pre-C-Preprocessor to translate ANSI trigraph idiocy in BUF
before main CCCP processing. Name `pcp' is also in honor of the
drugs the trigraph designers must have been on.
@ -2188,11 +2249,12 @@ static void
trigraph_pcp (buf)
FILE_BUF *buf;
{
register U_CHAR c, *fptr, *bptr, *sptr;
register U_CHAR c, *fptr, *bptr, *sptr, *lptr;
int len;
fptr = bptr = sptr = buf->buf;
while ((sptr = (U_CHAR *) index (sptr, '?')) != NULL) {
lptr = fptr + buf->length;
while ((sptr = (U_CHAR *) index0 (sptr, '?', lptr - sptr)) != NULL) {
if (*++sptr != '?')
continue;
switch (*++sptr) {
@ -2261,25 +2323,15 @@ newline_fix (bp)
U_CHAR *bp;
{
register U_CHAR *p = bp;
register int count = 0;
/* First count the backslash-newline pairs here. */
while (1) {
if (p[0] == '\\') {
if (p[1] == '\n')
p += 2, count++;
else if (p[1] == '\r' && p[2] == '\n')
p += 3, count++;
else
break;
} else
break;
}
while (p[0] == '\\' && p[1] == '\n')
p += 2;
/* What follows the backslash-newlines is not embarrassing. */
if (count == 0 || (*p != '/' && *p != '*'))
if (*p != '/' && *p != '*')
return;
/* Copy all potentially embarrassing characters
@ -2290,7 +2342,7 @@ newline_fix (bp)
*bp++ = *p++;
/* Now write the same number of pairs after the embarrassing chars. */
while (count-- > 0) {
while (bp < p) {
*bp++ = '\\';
*bp++ = '\n';
}
@ -2304,24 +2356,14 @@ name_newline_fix (bp)
U_CHAR *bp;
{
register U_CHAR *p = bp;
register int count = 0;
/* First count the backslash-newline pairs here. */
while (1) {
if (p[0] == '\\') {
if (p[1] == '\n')
p += 2, count++;
else if (p[1] == '\r' && p[2] == '\n')
p += 3, count++;
else
break;
} else
break;
}
while (p[0] == '\\' && p[1] == '\n')
p += 2;
/* What follows the backslash-newlines is not embarrassing. */
if (count == 0 || !is_idchar[*p])
if (!is_idchar[*p])
return;
/* Copy all potentially embarrassing characters
@ -2332,7 +2374,7 @@ name_newline_fix (bp)
*bp++ = *p++;
/* Now write the same number of pairs after the embarrassing chars. */
while (count-- > 0) {
while (bp < p) {
*bp++ = '\\';
*bp++ = '\n';
}
@ -2495,7 +2537,7 @@ do { ip = &instack[indepth]; \
obp = op->bufp; } while (0)
if (no_output && instack[indepth].fname != 0)
skip_if_group (&instack[indepth], 1);
skip_if_group (&instack[indepth], 1, NULL);
obp = op->bufp;
RECACHE;
@ -2513,22 +2555,25 @@ do { ip = &instack[indepth]; \
switch (c) {
case '\\':
if (ibp >= limit)
break;
if (*ibp == '\n') {
/* Always merge lines ending with backslash-newline,
even in middle of identifier. */
if (*ibp == '\n' && !ip->macro) {
/* At the top level, always merge lines ending with backslash-newline,
even in middle of identifier. But do not merge lines in a macro,
since backslash might be followed by a newline-space marker. */
++ibp;
++ip->lineno;
--obp; /* remove backslash from obuf */
break;
}
/* If ANSI, backslash is just another character outside a string. */
if (!traditional)
goto randomchar;
/* Otherwise, backslash suppresses specialness of following char,
so copy it here to prevent the switch from seeing it.
But first get any pending identifier processed. */
if (ident_length > 0)
goto specialchar;
*obp++ = *ibp++;
if (ibp < limit)
*obp++ = *ibp++;
break;
case '#':
@ -2608,7 +2653,7 @@ do { ip = &instack[indepth]; \
/* If not generating expanded output,
what we do with ordinary text is skip it.
Discard everything until next # directive. */
skip_if_group (&instack[indepth], 1);
skip_if_group (&instack[indepth], 1, 0);
RECACHE;
beg_of_line = ibp;
break;
@ -2632,7 +2677,7 @@ do { ip = &instack[indepth]; \
/* If not generating expanded output, ignore everything until
next # directive. */
if (no_output && instack[indepth].fname)
skip_if_group (&instack[indepth], 1);
skip_if_group (&instack[indepth], 1, 0);
obp = op->bufp;
RECACHE;
beg_of_line = ibp;
@ -2790,6 +2835,9 @@ do { ip = &instack[indepth]; \
char *lintcmd = get_lintcmd (ibp, limit, &argbp, &arglen, &cmdlen);
if (lintcmd != NULL) {
op->bufp = obp;
check_expand (op, cmdlen + arglen + 14);
obp = op->bufp;
/* I believe it is always safe to emit this newline: */
obp[-1] = '\n';
bcopy ("#pragma lint ", (char *) obp, 13);
@ -2804,10 +2852,12 @@ do { ip = &instack[indepth]; \
}
/* OK, now bring us back to the state we were in before we entered
this branch. We need #line b/c the newline for the pragma
could fuck things up. */
this branch. We need #line because the #pragma's newline always
messes up the line count. */
op->bufp = obp;
output_line_command (ip, op, 0, same_file);
*(obp++) = ' '; /* just in case, if comments are copied thru */
check_expand (op, limit - ibp + 2);
obp = op->bufp;
*(obp++) = '/';
}
}
@ -2884,9 +2934,7 @@ do { ip = &instack[indepth]; \
ibp += 2;
}
c = *ibp++;
/* ".." terminates a preprocessing number. This is useless for C
code but useful for preprocessing other things. */
if (!isalnum (c) && (c != '.' || *ibp == '.') && c != '_') {
if (!is_idchar[c] && c != '.') {
--ibp;
break;
}
@ -2988,7 +3036,7 @@ do { ip = &instack[indepth]; \
if (ip->lineno != op->lineno) {
op->bufp = obp;
output_line_command (ip, op, 1, same_file);
check_expand (op, ip->length - (ip->bufp - ip->buf));
check_expand (op, limit - ibp);
obp = op->bufp;
}
break;
@ -3226,9 +3274,17 @@ do { ip = &instack[indepth]; \
/* This is now known to be a macro call.
Discard the macro name from the output,
along with any following whitespace just copied. */
along with any following whitespace just copied,
but preserve newlines at the top level since this
is more likely to do the right thing with line numbers. */
obp = op->buf + obufp_before_macroname;
op->lineno = op_lineno_before_macroname;
if (ip->macro != 0)
op->lineno = op_lineno_before_macroname;
else {
int newlines = op->lineno - op_lineno_before_macroname;
while (0 < newlines--)
*obp++ = '\n';
}
/* Prevent accidental token-pasting with a character
before the macro call. */
@ -3529,8 +3585,9 @@ handle_directive (ip, op)
if (*bp == '\n') {
ip->lineno++;
copy_command = 1;
}
bp++;
bp++;
} else if (traditional)
bp++;
}
break;
@ -3555,7 +3612,14 @@ handle_directive (ip, op)
case '<':
if (!kt->angle_brackets)
break;
while (*bp && *bp != '>') bp++;
while (bp < limit && *bp != '>' && *bp != '\n') {
if (*bp == '\\' && bp[1] == '\n') {
ip->lineno++;
copy_command = 1;
bp++;
}
bp++;
}
break;
case '/':
@ -3783,7 +3847,7 @@ timestamp ()
{
static struct tm *timebuf;
if (!timebuf) {
time_t t = time (0);
time_t t = time ((time_t *)0);
timebuf = localtime (&t);
}
return timebuf;
@ -3925,7 +3989,8 @@ special_symbol (hp, op)
goto oops;
if (hp = lookup (ip->bufp, -1, -1)) {
if (pcp_outfile && pcp_inside_if
&& hp->value.defn->predefined)
&& (hp->type == T_CONST
|| (hp->type == T_MACRO && hp->value.defn->predefined)))
/* Output a precondition for this macro use. */
fprintf (pcp_outfile, "#define %s\n", hp->name);
buf = " 1 ";
@ -5360,9 +5425,31 @@ create_definition (buf, limit, op)
} else {
/* Simple expansion or empty definition. */
/* Skip spaces and tabs if any. */
while (bp < limit && (*bp == ' ' || *bp == '\t'))
++bp;
if (bp < limit)
{
switch (*bp)
{
case '\t': case ' ':
/* Skip spaces and tabs. */
while (++bp < limit && (*bp == ' ' || *bp == '\t'))
continue;
break;
case '!': case '"': case '#': case '%': case '&': case '\'':
case ')': case '*': case '+': case ',': case '-': case '.':
case '/': case ':': case ';': case '<': case '=': case '>':
case '?': case '[': case '\\': case ']': case '^': case '{':
case '|': case '}': case '~':
warning ("missing white space after `#define %.*s'",
sym_length, symname);
break;
default:
pedwarn ("missing white space after `#define %.*s'",
sym_length, symname);
break;
}
}
/* Now everything from bp before limit is the definition. */
defn = collect_expansion (bp, limit, -1, NULL_PTR);
defn->args.argnames = (U_CHAR *) "";
@ -5659,16 +5746,8 @@ collect_expansion (buf, end, nargs, arglist)
expected_delimiter = c;
break;
/* Special hack: if a \# is written in the #define
include a # in the definition. This is useless for C code
but useful for preprocessing other things. */
case '\\':
/* \# quotes a # even outside of strings. */
if (p < limit && *p == '#' && !expected_delimiter) {
exp_p--;
*exp_p++ = *p++;
} else if (p < limit && expected_delimiter) {
if (p < limit && expected_delimiter) {
/* In a string, backslash goes through
and makes next char ordinary. */
*exp_p++ = *p++;
@ -6537,7 +6616,7 @@ do_ident (buf, limit)
free (trybuf.buf);
/* Output directive name. */
check_expand (op, 8);
check_expand (op, 7);
bcopy ("#ident ", (char *) op->bufp, 7);
op->bufp += 7;
@ -6647,11 +6726,11 @@ do_if (buf, limit, op, keyword)
FILE_BUF *op;
struct directive *keyword;
{
int value;
HOST_WIDE_INT value;
FILE_BUF *ip = &instack[indepth];
value = eval_if_expression (buf, limit - buf);
conditional_skip (ip, value == 0, T_IF, NULL_PTR);
conditional_skip (ip, value == 0, T_IF, NULL_PTR, op);
return 0;
}
@ -6666,7 +6745,7 @@ do_elif (buf, limit, op, keyword)
FILE_BUF *op;
struct directive *keyword;
{
int value;
HOST_WIDE_INT value;
FILE_BUF *ip = &instack[indepth];
if (if_stack == instack[indepth].if_stack) {
@ -6685,11 +6764,11 @@ do_elif (buf, limit, op, keyword)
}
if (if_stack->if_succeeded)
skip_if_group (ip, 0);
skip_if_group (ip, 0, op);
else {
value = eval_if_expression (buf, limit - buf);
if (value == 0)
skip_if_group (ip, 0);
skip_if_group (ip, 0, op);
else {
++if_stack->if_succeeded; /* continue processing input */
output_line_command (ip, op, 1, same_file);
@ -6702,16 +6781,16 @@ do_elif (buf, limit, op, keyword)
* evaluate a #if expression in BUF, of length LENGTH,
* then parse the result as a C expression and return the value as an int.
*/
static int
static HOST_WIDE_INT
eval_if_expression (buf, length)
U_CHAR *buf;
int length;
{
FILE_BUF temp_obuf;
HASHNODE *save_defined;
int value;
HOST_WIDE_INT value;
save_defined = install ("defined", -1, T_SPEC_DEFINED, 0, 0, -1);
save_defined = install ("defined", -1, T_SPEC_DEFINED, 0, NULL_PTR, -1);
pcp_inside_if = 1;
temp_obuf = expand_to_temp_buffer (buf, buf + length, 0, 1);
pcp_inside_if = 0;
@ -6790,7 +6869,9 @@ do_xifdef (buf, limit, op, keyword)
if (pcp_outfile) {
/* Output a precondition for this macro. */
if (hp && hp->value.defn->predefined)
if (hp &&
(hp->type == T_CONST
|| (hp->type == T_MACRO && hp->value.defn->predefined)))
fprintf (pcp_outfile, "#define %s\n", hp->name);
else {
U_CHAR *cp = buf;
@ -6809,7 +6890,7 @@ do_xifdef (buf, limit, op, keyword)
}
}
conditional_skip (ip, skip, T_IF, control_macro);
conditional_skip (ip, skip, T_IF, control_macro, op);
return 0;
}
@ -6819,11 +6900,12 @@ do_xifdef (buf, limit, op, keyword)
Otherwise, CONTROL_MACRO is 0. */
static void
conditional_skip (ip, skip, type, control_macro)
conditional_skip (ip, skip, type, control_macro, op)
FILE_BUF *ip;
int skip;
enum node_type type;
U_CHAR *control_macro;
FILE_BUF *op;
{
IF_STACK_FRAME *temp;
@ -6837,7 +6919,7 @@ conditional_skip (ip, skip, type, control_macro)
if_stack->type = type;
if (skip != 0) {
skip_if_group (ip, 0);
skip_if_group (ip, 0, op);
return;
} else {
++if_stack->if_succeeded;
@ -6851,9 +6933,10 @@ conditional_skip (ip, skip, type, control_macro)
* If ANY is nonzero, return at next directive of any sort.
*/
static void
skip_if_group (ip, any)
skip_if_group (ip, any, op)
FILE_BUF *ip;
int any;
FILE_BUF *op;
{
register U_CHAR *bp = ip->bufp, *cp;
register U_CHAR *endb = ip->buf + ip->length;
@ -6862,6 +6945,25 @@ skip_if_group (ip, any)
U_CHAR *beg_of_line = bp;
register int ident_length;
U_CHAR *ident, *after_ident;
/* Save info about where the group starts. */
U_CHAR *beg_of_group = bp;
int beg_lineno = ip->lineno;
if (output_conditionals && op != 0) {
char *ptr = "#failed\n";
int len = strlen (ptr);
if (op->bufp > op->buf && op->bufp[-1] != '\n')
{
*op->bufp++ = '\n';
op->lineno++;
}
check_expand (op, len);
bcopy (ptr, (char *) op->bufp, len);
op->bufp += len;
op->lineno++;
output_line_command (ip, op, 1, 0);
}
while (bp < endb) {
switch (*bp++) {
@ -7013,7 +7115,7 @@ skip_if_group (ip, any)
&& strncmp (cp, kt->name, kt->length) == 0) {
/* If we are asked to return on next directive, do so now. */
if (any)
return;
goto done;
switch (kt->type) {
case T_IF:
@ -7036,7 +7138,7 @@ skip_if_group (ip, any)
break;
}
else if (if_stack == save_if_stack)
return; /* found what we came for */
goto done; /* found what we came for */
if (kt->type != T_ENDIF) {
if (if_stack->type == T_ELSE)
@ -7058,10 +7160,32 @@ skip_if_group (ip, any)
pedwarn ("invalid preprocessor directive name");
}
}
ip->bufp = bp;
/* after this returns, rescan will exit because ip->bufp
now points to the end of the buffer.
rescan is responsible for the error message also. */
done:
if (output_conditionals && op != 0) {
char *ptr = "#endfailed\n";
int len = strlen (ptr);
if (op->bufp > op->buf && op->bufp[-1] != '\n')
{
*op->bufp++ = '\n';
op->lineno++;
}
check_expand (op, beg_of_line - beg_of_group);
bcopy ((char *) beg_of_group, (char *) op->bufp,
beg_of_line - beg_of_group);
op->bufp += beg_of_line - beg_of_group;
op->lineno += ip->lineno - beg_lineno;
check_expand (op, len);
bcopy (ptr, (char *) op->bufp, len);
op->bufp += len;
op->lineno++;
}
}
/*
@ -7104,7 +7228,7 @@ do_else (buf, limit, op, keyword)
}
if (if_stack->if_succeeded)
skip_if_group (ip, 0);
skip_if_group (ip, 0, op);
else {
++if_stack->if_succeeded; /* continue processing input */
output_line_command (ip, op, 1, same_file);
@ -8383,6 +8507,39 @@ change_newlines (start, length)
return obp - start;
}
/*
* my_strerror - return the descriptive text associated with an `errno' code.
*/
char *
my_strerror (errnum)
int errnum;
{
char *result;
#ifndef VMS
#ifndef HAVE_STRERROR
result = (char *) ((errnum < sys_nerr) ? sys_errlist[errnum] : 0);
#else
result = strerror (errnum);
#endif
#else /* VMS */
/* VAXCRTL's strerror() takes an optional second argument, which only
matters when the first argument is EVMSERR. However, it's simplest
just to pass it unconditionally. `vaxc$errno' is declared in
<errno.h>, and maintained by the library in parallel with `errno'.
We assume that caller's `errnum' either matches the last setting of
`errno' by the library or else does not have the value `EVMSERR'. */
result = strerror (errnum, vaxc$errno);
#endif
if (!result)
result = "undocumented I/O error";
return result;
}
/*
* error - print error message and increment count of errors.
*/
@ -8430,10 +8587,7 @@ error_from_errno (name)
if (ip != NULL)
fprintf (stderr, "%s:%d: ", ip->nominal_fname, ip->lineno);
if (errno < sys_nerr)
fprintf (stderr, "%s: %s\n", name, sys_errlist[errno]);
else
fprintf (stderr, "%s: undocumented I/O error\n", name);
fprintf (stderr, "%s: %s\n", name, my_strerror (errno));
errors++;
}
@ -8929,15 +9083,16 @@ dump_defn_1 (base, start, length, of)
U_CHAR *limit = base + start + length;
while (p < limit) {
if (*p != '\n')
putc (*p, of);
else if (*p == '\"' || *p =='\'') {
if (*p == '\"' || *p =='\'') {
U_CHAR *p1 = skip_quoted_string (p, limit, 0, NULL_PTR,
NULL_PTR, NULL_PTR);
fwrite (p, p1 - p, 1, of);
p = p1 - 1;
p = p1;
} else {
if (*p != '\n')
putc (*p, of);
p++;
}
p++;
}
}
@ -9012,29 +9167,29 @@ initialize_builtins (inp, outp)
FILE_BUF *inp;
FILE_BUF *outp;
{
install ("__LINE__", -1, T_SPECLINE, 0, 0, -1);
install ("__DATE__", -1, T_DATE, 0, 0, -1);
install ("__FILE__", -1, T_FILE, 0, 0, -1);
install ("__BASE_FILE__", -1, T_BASE_FILE, 0, 0, -1);
install ("__INCLUDE_LEVEL__", -1, T_INCLUDE_LEVEL, 0, 0, -1);
install ("__VERSION__", -1, T_VERSION, 0, 0, -1);
install ("__LINE__", -1, T_SPECLINE, 0, NULL_PTR, -1);
install ("__DATE__", -1, T_DATE, 0, NULL_PTR, -1);
install ("__FILE__", -1, T_FILE, 0, NULL_PTR, -1);
install ("__BASE_FILE__", -1, T_BASE_FILE, 0, NULL_PTR, -1);
install ("__INCLUDE_LEVEL__", -1, T_INCLUDE_LEVEL, 0, NULL_PTR, -1);
install ("__VERSION__", -1, T_VERSION, 0, NULL_PTR, -1);
#ifndef NO_BUILTIN_SIZE_TYPE
install ("__SIZE_TYPE__", -1, T_SIZE_TYPE, 0, 0, -1);
install ("__SIZE_TYPE__", -1, T_SIZE_TYPE, 0, NULL_PTR, -1);
#endif
#ifndef NO_BUILTIN_PTRDIFF_TYPE
install ("__PTRDIFF_TYPE__ ", -1, T_PTRDIFF_TYPE, 0, 0, -1);
install ("__PTRDIFF_TYPE__ ", -1, T_PTRDIFF_TYPE, 0, NULL_PTR, -1);
#endif
install ("__WCHAR_TYPE__", -1, T_WCHAR_TYPE, 0, 0, -1);
install ("__USER_LABEL_PREFIX__", -1, T_USER_LABEL_PREFIX_TYPE, 0, 0, -1);
install ("__REGISTER_PREFIX__", -1, T_REGISTER_PREFIX_TYPE, 0, 0, -1);
install ("__TIME__", -1, T_TIME, 0, 0, -1);
install ("__WCHAR_TYPE__", -1, T_WCHAR_TYPE, 0, NULL_PTR, -1);
install ("__USER_LABEL_PREFIX__",-1,T_USER_LABEL_PREFIX_TYPE,0,NULL_PTR, -1);
install ("__REGISTER_PREFIX__", -1, T_REGISTER_PREFIX_TYPE, 0, NULL_PTR, -1);
install ("__TIME__", -1, T_TIME, 0, NULL_PTR, -1);
if (!traditional)
install ("__STDC__", -1, T_CONST, STDC_VALUE, 0, -1);
install ("__STDC__", -1, T_CONST, STDC_VALUE, NULL_PTR, -1);
if (objc)
install ("__OBJC__", -1, T_CONST, 1, 0, -1);
install ("__OBJC__", -1, T_CONST, 1, NULL_PTR, -1);
/* This is supplied using a -D by the compiler driver
so that it is present only when truly compiling with GNU C. */
/* install ("__GNUC__", -1, T_CONST, 2, 0, -1); */
/* install ("__GNUC__", -1, T_CONST, 2, NULL_PTR, -1); */
if (debug_output)
{
@ -9119,6 +9274,12 @@ make_definition (str, op)
}
while (is_idchar[*++p])
;
if (*p == '(') {
while (is_idchar[*++p] || *p == ',' || is_hor_space[*p])
;
if (*p++ != ')')
p = str; /* Error */
}
if (*p == 0) {
buf = (U_CHAR *) alloca (p - buf + 4);
strcpy ((char *)buf, str);
@ -9137,7 +9298,18 @@ make_definition (str, op)
p++;
q = &buf[p - str];
while (*p) {
if (*p == '\\' && p[1] == '\n')
if (*p == '\"' || *p == '\'') {
int unterminated = 0;
U_CHAR *p1 = skip_quoted_string (p, p + strlen (p), 0,
NULL_PTR, NULL_PTR, &unterminated);
if (unterminated)
return;
while (p != p1)
if (*p == '\\' && p[1] == '\n')
p += 2;
else
*q++ = *p++;
} else if (*p == '\\' && p[1] == '\n')
p += 2;
/* Change newline chars into newline-markers. */
else if (*p == '\n')
@ -9167,7 +9339,7 @@ make_definition (str, op)
;
/* Pass NULL instead of OP, since this is a "predefined" macro. */
do_define (buf, buf + strlen (buf), NULL, kt);
do_define (buf, buf + strlen (buf), NULL_PTR, kt);
--indepth;
}
@ -9392,10 +9564,7 @@ perror_with_name (name)
char *name;
{
fprintf (stderr, "%s: ", progname);
if (errno < sys_nerr)
fprintf (stderr, "%s: %s\n", name, sys_errlist[errno]);
else
fprintf (stderr, "%s: undocumented I/O error\n", name);
fprintf (stderr, "%s: %s\n", name, my_strerror (errno));
errors++;
}

View File

@ -55,13 +55,27 @@ struct arglist {
#endif
#endif
/* Find the largest host integer type and set its size and type. */
#ifndef HOST_BITS_PER_WIDE_INT
#if HOST_BITS_PER_LONG > HOST_BITS_PER_INT
#define HOST_BITS_PER_WIDE_INT HOST_BITS_PER_LONG
#define HOST_WIDE_INT long
#else
#define HOST_BITS_PER_WIDE_INT HOST_BITS_PER_INT
#define HOST_WIDE_INT int
#endif
#endif
#ifndef NULL_PTR
#define NULL_PTR ((GENERIC_PTR)0)
#endif
int yylex ();
void yyerror ();
int expression_value;
HOST_WIDE_INT expression_value;
static jmp_buf parse_return_error;
@ -119,7 +133,7 @@ static void integer_overflow ();
static long left_shift ();
static long right_shift ();
#line 127 "cexp.y"
#line 141 "cexp.y"
typedef union {
struct constant {long value; int unsignedp;} integer;
struct name {U_CHAR *address; int length;} name;
@ -218,10 +232,10 @@ static const short yyrhs[] = { 35,
#if YYDEBUG != 0
static const short yyrline[] = { 0,
159, 164, 165, 172, 177, 180, 182, 185, 189, 191,
196, 201, 213, 228, 239, 246, 253, 259, 265, 268,
271, 277, 283, 289, 295, 298, 301, 304, 307, 310,
313, 315, 317, 322, 324, 337
173, 178, 179, 186, 191, 194, 196, 199, 203, 205,
210, 215, 227, 242, 253, 260, 267, 273, 279, 282,
285, 291, 297, 303, 309, 312, 315, 318, 321, 324,
327, 329, 331, 336, 338, 351
};
static const char * const yytname[] = { "$","error","$illegal.","INT","CHAR",
@ -805,59 +819,59 @@ yyparse(YYPARSE_PARAM)
switch (yyn) {
case 1:
#line 160 "cexp.y"
#line 174 "cexp.y"
{ expression_value = yyvsp[0].integer.value; ;
break;}
case 3:
#line 166 "cexp.y"
#line 180 "cexp.y"
{ if (pedantic)
pedwarn ("comma operator in operand of `#if'");
yyval.integer = yyvsp[0].integer; ;
break;}
case 4:
#line 173 "cexp.y"
#line 187 "cexp.y"
{ yyval.integer.value = - yyvsp[0].integer.value;
if ((yyval.integer.value & yyvsp[0].integer.value) < 0 && ! yyvsp[0].integer.unsignedp)
integer_overflow ();
yyval.integer.unsignedp = yyvsp[0].integer.unsignedp; ;
break;}
case 5:
#line 178 "cexp.y"
#line 192 "cexp.y"
{ yyval.integer.value = ! yyvsp[0].integer.value;
yyval.integer.unsignedp = 0; ;
break;}
case 6:
#line 181 "cexp.y"
#line 195 "cexp.y"
{ yyval.integer = yyvsp[0].integer; ;
break;}
case 7:
#line 183 "cexp.y"
#line 197 "cexp.y"
{ yyval.integer.value = ~ yyvsp[0].integer.value;
yyval.integer.unsignedp = yyvsp[0].integer.unsignedp; ;
break;}
case 8:
#line 186 "cexp.y"
#line 200 "cexp.y"
{ yyval.integer.value = check_assertion (yyvsp[0].name.address, yyvsp[0].name.length,
0, NULL_PTR);
yyval.integer.unsignedp = 0; ;
break;}
case 9:
#line 190 "cexp.y"
#line 204 "cexp.y"
{ keyword_parsing = 1; ;
break;}
case 10:
#line 192 "cexp.y"
#line 206 "cexp.y"
{ yyval.integer.value = check_assertion (yyvsp[-4].name.address, yyvsp[-4].name.length,
1, yyvsp[-1].keywords);
keyword_parsing = 0;
yyval.integer.unsignedp = 0; ;
break;}
case 11:
#line 197 "cexp.y"
#line 211 "cexp.y"
{ yyval.integer = yyvsp[-1].integer; ;
break;}
case 12:
#line 202 "cexp.y"
#line 216 "cexp.y"
{ yyval.integer.unsignedp = yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp;
if (yyval.integer.unsignedp)
yyval.integer.value = (unsigned long) yyvsp[-2].integer.value * yyvsp[0].integer.value;
@ -871,7 +885,7 @@ case 12:
} ;
break;}
case 13:
#line 214 "cexp.y"
#line 228 "cexp.y"
{ if (yyvsp[0].integer.value == 0)
{
error ("division by zero in #if");
@ -888,7 +902,7 @@ case 13:
} ;
break;}
case 14:
#line 229 "cexp.y"
#line 243 "cexp.y"
{ if (yyvsp[0].integer.value == 0)
{
error ("division by zero in #if");
@ -901,7 +915,7 @@ case 14:
yyval.integer.value = yyvsp[-2].integer.value % yyvsp[0].integer.value; ;
break;}
case 15:
#line 240 "cexp.y"
#line 254 "cexp.y"
{ yyval.integer.value = yyvsp[-2].integer.value + yyvsp[0].integer.value;
yyval.integer.unsignedp = yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp;
if (! yyval.integer.unsignedp
@ -910,7 +924,7 @@ case 15:
integer_overflow (); ;
break;}
case 16:
#line 247 "cexp.y"
#line 261 "cexp.y"
{ yyval.integer.value = yyvsp[-2].integer.value - yyvsp[0].integer.value;
yyval.integer.unsignedp = yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp;
if (! yyval.integer.unsignedp
@ -919,7 +933,7 @@ case 16:
integer_overflow (); ;
break;}
case 17:
#line 254 "cexp.y"
#line 268 "cexp.y"
{ yyval.integer.unsignedp = yyvsp[-2].integer.unsignedp;
if (yyvsp[0].integer.value < 0 && ! yyvsp[0].integer.unsignedp)
yyval.integer.value = right_shift (&yyvsp[-2].integer, -yyvsp[0].integer.value);
@ -927,7 +941,7 @@ case 17:
yyval.integer.value = left_shift (&yyvsp[-2].integer, yyvsp[0].integer.value); ;
break;}
case 18:
#line 260 "cexp.y"
#line 274 "cexp.y"
{ yyval.integer.unsignedp = yyvsp[-2].integer.unsignedp;
if (yyvsp[0].integer.value < 0 && ! yyvsp[0].integer.unsignedp)
yyval.integer.value = left_shift (&yyvsp[-2].integer, -yyvsp[0].integer.value);
@ -935,17 +949,17 @@ case 18:
yyval.integer.value = right_shift (&yyvsp[-2].integer, yyvsp[0].integer.value); ;
break;}
case 19:
#line 266 "cexp.y"
#line 280 "cexp.y"
{ yyval.integer.value = (yyvsp[-2].integer.value == yyvsp[0].integer.value);
yyval.integer.unsignedp = 0; ;
break;}
case 20:
#line 269 "cexp.y"
#line 283 "cexp.y"
{ yyval.integer.value = (yyvsp[-2].integer.value != yyvsp[0].integer.value);
yyval.integer.unsignedp = 0; ;
break;}
case 21:
#line 272 "cexp.y"
#line 286 "cexp.y"
{ yyval.integer.unsignedp = 0;
if (yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp)
yyval.integer.value = (unsigned long) yyvsp[-2].integer.value <= yyvsp[0].integer.value;
@ -953,7 +967,7 @@ case 21:
yyval.integer.value = yyvsp[-2].integer.value <= yyvsp[0].integer.value; ;
break;}
case 22:
#line 278 "cexp.y"
#line 292 "cexp.y"
{ yyval.integer.unsignedp = 0;
if (yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp)
yyval.integer.value = (unsigned long) yyvsp[-2].integer.value >= yyvsp[0].integer.value;
@ -961,7 +975,7 @@ case 22:
yyval.integer.value = yyvsp[-2].integer.value >= yyvsp[0].integer.value; ;
break;}
case 23:
#line 284 "cexp.y"
#line 298 "cexp.y"
{ yyval.integer.unsignedp = 0;
if (yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp)
yyval.integer.value = (unsigned long) yyvsp[-2].integer.value < yyvsp[0].integer.value;
@ -969,7 +983,7 @@ case 23:
yyval.integer.value = yyvsp[-2].integer.value < yyvsp[0].integer.value; ;
break;}
case 24:
#line 290 "cexp.y"
#line 304 "cexp.y"
{ yyval.integer.unsignedp = 0;
if (yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp)
yyval.integer.value = (unsigned long) yyvsp[-2].integer.value > yyvsp[0].integer.value;
@ -977,54 +991,54 @@ case 24:
yyval.integer.value = yyvsp[-2].integer.value > yyvsp[0].integer.value; ;
break;}
case 25:
#line 296 "cexp.y"
#line 310 "cexp.y"
{ yyval.integer.value = yyvsp[-2].integer.value & yyvsp[0].integer.value;
yyval.integer.unsignedp = yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp; ;
break;}
case 26:
#line 299 "cexp.y"
#line 313 "cexp.y"
{ yyval.integer.value = yyvsp[-2].integer.value ^ yyvsp[0].integer.value;
yyval.integer.unsignedp = yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp; ;
break;}
case 27:
#line 302 "cexp.y"
#line 316 "cexp.y"
{ yyval.integer.value = yyvsp[-2].integer.value | yyvsp[0].integer.value;
yyval.integer.unsignedp = yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp; ;
break;}
case 28:
#line 305 "cexp.y"
#line 319 "cexp.y"
{ yyval.integer.value = (yyvsp[-2].integer.value && yyvsp[0].integer.value);
yyval.integer.unsignedp = 0; ;
break;}
case 29:
#line 308 "cexp.y"
#line 322 "cexp.y"
{ yyval.integer.value = (yyvsp[-2].integer.value || yyvsp[0].integer.value);
yyval.integer.unsignedp = 0; ;
break;}
case 30:
#line 311 "cexp.y"
#line 325 "cexp.y"
{ yyval.integer.value = yyvsp[-4].integer.value ? yyvsp[-2].integer.value : yyvsp[0].integer.value;
yyval.integer.unsignedp = yyvsp[-2].integer.unsignedp || yyvsp[0].integer.unsignedp; ;
break;}
case 31:
#line 314 "cexp.y"
#line 328 "cexp.y"
{ yyval.integer = yylval.integer; ;
break;}
case 32:
#line 316 "cexp.y"
#line 330 "cexp.y"
{ yyval.integer = yylval.integer; ;
break;}
case 33:
#line 318 "cexp.y"
#line 332 "cexp.y"
{ yyval.integer.value = 0;
yyval.integer.unsignedp = 0; ;
break;}
case 34:
#line 323 "cexp.y"
#line 337 "cexp.y"
{ yyval.keywords = 0; ;
break;}
case 35:
#line 325 "cexp.y"
#line 339 "cexp.y"
{ struct arglist *temp;
yyval.keywords = (struct arglist *) xmalloc (sizeof (struct arglist));
yyval.keywords->next = yyvsp[-2].keywords;
@ -1039,7 +1053,7 @@ case 35:
temp->next->length = 1; ;
break;}
case 36:
#line 338 "cexp.y"
#line 352 "cexp.y"
{ yyval.keywords = (struct arglist *) xmalloc (sizeof (struct arglist));
yyval.keywords->name = yyvsp[-1].name.address;
yyval.keywords->length = yyvsp[-1].name.length;
@ -1243,7 +1257,7 @@ case 36:
yystate = yyn;
goto yynewstate;
}
#line 343 "cexp.y"
#line 357 "cexp.y"
/* During parsing of a C expression, the pointer to the next character
@ -1809,7 +1823,7 @@ right_shift (a, b)
/* We do not support C comments. They should be removed before
this function is called. */
int
HOST_WIDE_INT
parse_c_expression (string)
char *string;
{
@ -1858,7 +1872,7 @@ main ()
if (buf[n] == EOF)
break;
buf[n] = '\0';
printf ("parser returned %d\n", parse_c_expression (buf));
printf ("parser returned %ld\n", parse_c_expression (buf));
}
return 0;

View File

@ -372,6 +372,13 @@ extern rtx bcmp_libfunc;
extern rtx memset_libfunc;
extern rtx bzero_libfunc;
extern rtx eqhf2_libfunc;
extern rtx nehf2_libfunc;
extern rtx gthf2_libfunc;
extern rtx gehf2_libfunc;
extern rtx lthf2_libfunc;
extern rtx lehf2_libfunc;
extern rtx eqsf2_libfunc;
extern rtx nesf2_libfunc;
extern rtx gtsf2_libfunc;

View File

@ -63,18 +63,23 @@ extern void __do_global_dtors ();
we define it once here as a macro to avoid various instances getting
out-of-sync with one another. */
/* The first word may or may not contain the number of pointers in the table.
/* Some systems place the number of pointers
in the first word of the table.
On other systems, that word is -1.
In all cases, the table is null-terminated.
We ignore the first word and scan up to the null. */
If the length is not recorded, count up to the null. */
/* Some systems use a different strategy for finding the ctors.
For example, svr3. */
#ifndef DO_GLOBAL_CTORS_BODY
#define DO_GLOBAL_CTORS_BODY \
do { \
func_ptr *p; \
for (p = __CTOR_LIST__ + 1; *p; ) \
(*p++) (); \
} while (0)
unsigned long nptrs = (unsigned long) __CTOR_LIST__[0]; \
unsigned i; \
if (nptrs == -1) \
for (nptrs = 0; __CTOR_LIST__[nptrs + 1] != 0; nptrs++); \
for (i = nptrs; i >= 1; i--) \
__CTOR_LIST__[i] (); \
} while (0)
#endif

View File

@ -1,4 +1,4 @@
/* Definitions of target machine for GNU compiler for Intel 80386.
/* Definitions of target machine for GNU compiler for Intel X86 (386, 486, pentium)
Copyright (C) 1988, 1992, 1994 Free Software Foundation, Inc.
This file is part of GNU CC.
@ -62,77 +62,130 @@ extern int target_flags;
#define TARGET_CPU_DEFAULT 0
#endif
/* Compile 80387 insns for floating point (not library calls). */
#define TARGET_80387 (target_flags & 1)
/* Compile code for an i486. */
#define TARGET_486 (target_flags & 2)
/* Masks for the -m switches */
#define MASK_80387 000000000001 /* Hardware floating point */
#define MASK_486 000000000002 /* 80486 specific */
#define MASK_NOTUSED 000000000004 /* bit not currently used */
#define MASK_RTD 000000000010 /* Use ret that pops args */
#define MASK_REGPARM 000000000020 /* Pass args in eax, edx */
#define MASK_SVR3_SHLIB 000000000040 /* Uninit locals into bss */
#define MASK_IEEE_FP 000000000100 /* IEEE fp comparisons */
#define MASK_FLOAT_RETURNS 000000000200 /* Return float in st(0) */
#define MASK_NO_FANCY_MATH_387 000000000400 /* Disable sin, cos, sqrt */
/* Temporary codegen switches */
#define MASK_DEBUG_ADDR 000001000000 /* Debug GO_IF_LEGITIMATE_ADDRESS */
#define MASK_NO_WIDE_MULTIPLY 000002000000 /* Disable 32x32->64 multiplies */
#define MASK_NO_MOVE 000004000000 /* Don't generate mem->mem */
/* Use the floating point instructions */
#define TARGET_80387 (target_flags & MASK_80387)
/* Compile using ret insn that pops args.
This will not work unless you use prototypes at least
for all functions that can take varying numbers of args. */
#define TARGET_RTD (target_flags & 8)
#define TARGET_RTD (target_flags & MASK_RTD)
/* Compile passing first two args in regs 0 and 1.
This exists only to test compiler features that will
be needed for RISC chips. It is not usable
and is not intended to be usable on this cpu. */
#define TARGET_REGPARM (target_flags & 020)
#define TARGET_REGPARM (target_flags & MASK_RTD)
/* Put uninitialized locals into bss, not data.
Meaningful only on svr3. */
#define TARGET_SVR3_SHLIB (target_flags & 040)
#define TARGET_SVR3_SHLIB (target_flags & MASK_SVR3_SHLIB)
/* Use IEEE floating point comparisons. These handle correctly the cases
where the result of a comparison is unordered. Normally SIGFPE is
generated in such cases, in which case this isn't needed. */
#define TARGET_IEEE_FP (target_flags & 0100)
#define TARGET_IEEE_FP (target_flags & MASK_IEEE_FP)
/* Functions that return a floating point value may return that value
in the 387 FPU or in 386 integer registers. If set, this flag causes
the 387 to be used, which is compatible with most calling conventions. */
#define TARGET_FLOAT_RETURNS_IN_80387 (target_flags & 0200)
#define TARGET_FLOAT_RETURNS_IN_80387 (target_flags & MASK_FLOAT_RETURNS)
/* Disable generation of FP sin, cos and sqrt operations for 387.
This is because FreeBSD lacks these in the math-emulator-code */
#define TARGET_NO_FANCY_MATH_387 (target_flags & 0400)
#define TARGET_NO_FANCY_MATH_387 (target_flags & MASK_NO_FANCY_MATH_387)
/* Macro to define tables used to set the flags.
This is a list in braces of pairs in braces,
each pair being { "NAME", VALUE }
where VALUE is the bits to set or minus the bits to clear.
An empty string NAME is used to identify the default VALUE. */
/* Temporary switches for tuning code generation */
#define TARGET_SWITCHES \
{ { "80387", 1}, \
{ "no-80387", -1}, \
{ "soft-float", -1}, \
{ "no-soft-float", 1}, \
{ "486", 2}, \
{ "no-486", -2}, \
{ "386", -2}, \
{ "rtd", 8}, \
{ "no-rtd", -8}, \
{ "regparm", 020}, \
{ "no-regparm", -020}, \
{ "svr3-shlib", 040}, \
{ "no-svr3-shlib", -040}, \
{ "ieee-fp", 0100}, \
{ "no-ieee-fp", -0100}, \
{ "fp-ret-in-387", 0200}, \
{ "no-fp-ret-in-387", -0200}, \
{ "no-fancy-math-387", 0400}, \
{ "fancy-math-387", -0400}, \
SUBTARGET_SWITCHES \
{ "", TARGET_DEFAULT | TARGET_CPU_DEFAULT}}
/* Disable 32x32->64 bit multiplies that are used for long long multiplies
and division by constants, but sometimes cause reload problems. */
#define TARGET_NO_WIDE_MULTIPLY (target_flags & MASK_NO_WIDE_MULTIPLY)
#define TARGET_WIDE_MULTIPLY (!TARGET_NO_WIDE_MULTIPLY)
/* This is meant to be redefined in the host dependent files */
/* Debug GO_IF_LEGITIMATE_ADDRESS */
#define TARGET_DEBUG_ADDR (target_flags & MASK_DEBUG_ADDR)
/* Hack macros for tuning code generation */
#define TARGET_MOVE ((target_flags & MASK_NO_MOVE) == 0) /* Don't generate memory->memory */
/* Specific hardware switches */
#define TARGET_486 (target_flags & MASK_486) /* 80486DX, 80486SX, 80486DX[24] */
#define TARGET_386 (!TARGET_486) /* 80386 */
#define TARGET_SWITCHES \
{ { "80387", MASK_80387 }, \
{ "no-80387", -MASK_80387 }, \
{ "hard-float", MASK_80387 }, \
{ "soft-float", -MASK_80387 }, \
{ "no-soft-float", MASK_80387 }, \
{ "386", -MASK_486 }, \
{ "no-386", MASK_486 }, \
{ "486", MASK_486 }, \
{ "no-486", -MASK_486 }, \
{ "rtd", MASK_RTD }, \
{ "no-rtd", -MASK_RTD }, \
{ "regparm", MASK_REGPARM }, \
{ "no-regparm", -MASK_REGPARM }, \
{ "svr3-shlib", MASK_SVR3_SHLIB }, \
{ "no-svr3-shlib", -MASK_SVR3_SHLIB }, \
{ "ieee-fp", MASK_IEEE_FP }, \
{ "no-ieee-fp", -MASK_IEEE_FP }, \
{ "fp-ret-in-387", MASK_FLOAT_RETURNS }, \
{ "no-fp-ret-in-387", -MASK_FLOAT_RETURNS }, \
{ "no-fancy-math-387", MASK_NO_FANCY_MATH_387 }, \
{ "fancy-math-387", -MASK_NO_FANCY_MATH_387 }, \
{ "no-wide-multiply", MASK_NO_WIDE_MULTIPLY }, \
{ "wide-multiply", -MASK_NO_WIDE_MULTIPLY }, \
{ "debug-addr", MASK_DEBUG_ADDR }, \
{ "no-debug-addr", -MASK_DEBUG_ADDR }, \
{ "move", -MASK_NO_MOVE }, \
{ "no-move", MASK_NO_MOVE }, \
SUBTARGET_SWITCHES \
{ "", TARGET_DEFAULT | TARGET_CPU_DEFAULT}}
/* This macro is similar to `TARGET_SWITCHES' but defines names of
command options that have values. Its definition is an
initializer with a subgrouping for each command option.
Each subgrouping contains a string constant, that defines the
fixed part of the option name, and the address of a variable. The
variable, type `char *', is set to the variable part of the given
option if the fixed part matches. The actual option name is made
by appending `-m' to the specified name. */
#define TARGET_OPTIONS \
{ { "reg-alloc=", &i386_reg_alloc_order }, \
SUBTARGET_OPTIONS }
/* Sometimes certain combinations of command options do not make
sense on a particular target machine. You can define a macro
`OVERRIDE_OPTIONS' to take account of this. This macro, if
defined, is executed once just after all the command options have
been parsed.
Don't use this macro to turn on various extra optimizations for
`-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
#define OVERRIDE_OPTIONS override_options ()
/* These are meant to be redefined in the host dependent files */
#define SUBTARGET_SWITCHES
#define SUBTARGET_OPTIONS
#define OVERRIDE_OPTIONS \
{ \
SUBTARGET_OVERRIDE_OPTIONS \
}
/* This is meant to be redefined in the host dependent files */
#define SUBTARGET_OVERRIDE_OPTIONS
/* target machine storage layout */
@ -263,11 +316,39 @@ extern int target_flags;
listed once, even those in FIXED_REGISTERS. List frame pointer
late and fixed registers last. Note that, in general, we prefer
registers listed in CALL_USED_REGISTERS, keeping the others
available for storage of persistent values. */
available for storage of persistent values.
Three different versions of REG_ALLOC_ORDER have been tried:
If the order is edx, ecx, eax, ... it produces a slightly faster compiler,
but slower code on simple functions returning values in eax.
If the order is eax, ecx, edx, ... it causes reload to abort when compiling
perl 4.036 due to not being able to create a DImode register (to hold a 2
word union).
If the order is eax, edx, ecx, ... it produces better code for simple
functions, and a slightly slower compiler. Users complained about the code
generated by allocating edx first, so restore the 'natural' order of things. */
#define REG_ALLOC_ORDER \
/*ax,cx,dx,bx,si,di,bp,sp,st,st1,st2,st3,st4,st5,st6,st7,arg*/ \
{ 0, 2, 1, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 }
/*ax,dx,cx,bx,si,di,bp,sp,st,st1,st2,st3,st4,st5,st6,st7,arg*/ \
{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 }
/* A C statement (sans semicolon) to choose the order in which to
allocate hard registers for pseudo-registers local to a basic
block.
Store the desired register order in the array `reg_alloc_order'.
Element 0 should be the register to allocate first; element 1, the
next register; and so on.
The macro body should not assume anything about the contents of
`reg_alloc_order' before execution of the macro.
On most machines, it is not necessary to define this macro. */
#define ORDER_REGS_FOR_LOCAL_ALLOC order_regs_for_local_alloc ()
/* Macro to conditionally modify fixed_regs/call_used_regs. */
#define CONDITIONAL_REGISTER_USAGE \
@ -381,6 +462,27 @@ extern int target_flags;
/* Place in which caller passes the structure value address.
0 means push the value on the stack like an argument. */
#define STRUCT_VALUE 0
/* A C expression which can inhibit the returning of certain function
values in registers, based on the type of value. A nonzero value
says to return the function value in memory, just as large
structures are always returned. Here TYPE will be a C expression
of type `tree', representing the data type of the value.
Note that values of mode `BLKmode' must be explicitly handled by
this macro. Also, the option `-fpcc-struct-return' takes effect
regardless of this macro. On most systems, it is possible to
leave the macro undefined; this causes a default definition to be
used, whose value is the constant 1 for `BLKmode' values, and 0
otherwise.
Do not use this macro to indicate that structures and unions
should always be returned in memory. You should instead use
`DEFAULT_PCC_STRUCT_RETURN' to indicate this. */
#define RETURN_IN_MEMORY(TYPE) \
((TYPE_MODE (TYPE) == BLKmode) || int_size_in_bytes (TYPE) > 12)
/* Define the classes of registers for register constraints in the
machine description. Also define ranges of constants.
@ -448,7 +550,7 @@ enum reg_class
0x3, /* AD_REGS */ \
0xf, /* Q_REGS */ \
0x10, 0x20, /* SIREG, DIREG */ \
0x1007f, /* INDEX_REGS */ \
0x07f, /* INDEX_REGS */ \
0x100ff, /* GENERAL_REGS */ \
0x0100, 0x0200, /* FP_TOP_REG, FP_SECOND_REG */ \
0xff00, /* FLOAT_REGS */ \
@ -459,7 +561,6 @@ enum reg_class
reg number REGNO. This could be a conditional expression
or could index an array. */
extern enum reg_class regclass_map[FIRST_PSEUDO_REGISTER];
#define REGNO_REG_CLASS(REGNO) (regclass_map[REGNO])
/* When defined, the compiler allows registers explicitly used in the
@ -591,6 +692,32 @@ extern enum reg_class regclass_map[FIRST_PSEUDO_REGISTER];
#define CLASS_MAX_NREGS(CLASS, MODE) \
(FLOAT_CLASS_P (CLASS) ? 1 : \
((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
/* A C expression whose value is nonzero if pseudos that have been
assigned to registers of class CLASS would likely be spilled
because registers of CLASS are needed for spill registers.
The default value of this macro returns 1 if CLASS has exactly one
register and zero otherwise. On most machines, this default
should be used. Only define this macro to some other expression
if pseudos allocated by `local-alloc.c' end up in memory because
their hard registers were needed for spill registers. If this
macro returns nonzero for those classes, those pseudos will only
be allocated by `global.c', which knows how to reallocate the
pseudo to another register. If there would not be another
register available for reallocation, you should not change the
definition of this macro since the only effect of such a
definition would be to slow down register allocation. */
#define CLASS_LIKELY_SPILLED_P(CLASS) \
(((CLASS) == AREG) \
|| ((CLASS) == DREG) \
|| ((CLASS) == CREG) \
|| ((CLASS) == BREG) \
|| ((CLASS) == AD_REGS) \
|| ((CLASS) == SIREG) \
|| ((CLASS) == DIREG))
/* Stack layout; function entry, exit and calling. */
@ -762,6 +889,102 @@ extern enum reg_class regclass_map[FIRST_PSEUDO_REGISTER];
} \
}
/* A C statement or compound statement to output to FILE some
assembler code to initialize basic-block profiling for the current
object module. This code should call the subroutine
`__bb_init_func' once per object module, passing it as its sole
argument the address of a block allocated in the object module.
The name of the block is a local symbol made with this statement:
ASM_GENERATE_INTERNAL_LABEL (BUFFER, "LPBX", 0);
Of course, since you are writing the definition of
`ASM_GENERATE_INTERNAL_LABEL' as well as that of this macro, you
can take a short cut in the definition of this macro and use the
name that you know will result.
The first word of this block is a flag which will be nonzero if the
object module has already been initialized. So test this word
first, and do not call `__bb_init_func' if the flag is nonzero. */
#undef FUNCTION_BLOCK_PROFILER
#define FUNCTION_BLOCK_PROFILER(STREAM, LABELNO) \
do \
{ \
static int num_func = 0; \
rtx xops[8]; \
char block_table[80], false_label[80]; \
\
ASM_GENERATE_INTERNAL_LABEL (block_table, "LPBX", 0); \
ASM_GENERATE_INTERNAL_LABEL (false_label, "LPBZ", num_func); \
\
xops[0] = const0_rtx; \
xops[1] = gen_rtx (SYMBOL_REF, VOIDmode, block_table); \
xops[2] = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, VOIDmode, false_label)); \
xops[3] = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, VOIDmode, "__bb_init_func")); \
xops[4] = gen_rtx (MEM, Pmode, xops[1]); \
xops[5] = stack_pointer_rtx; \
xops[6] = GEN_INT (4); \
xops[7] = gen_rtx (REG, Pmode, 0); /* eax */ \
\
CONSTANT_POOL_ADDRESS_P (xops[1]) = TRUE; \
CONSTANT_POOL_ADDRESS_P (xops[2]) = TRUE; \
\
output_asm_insn (AS2(cmp%L4,%0,%4), xops); \
output_asm_insn (AS1(jne,%2), xops); \
\
if (!flag_pic) \
output_asm_insn (AS1(push%L1,%1), xops); \
else \
{ \
output_asm_insn (AS2 (lea%L7,%a1,%7), xops); \
output_asm_insn (AS1 (push%L7,%7), xops); \
} \
\
output_asm_insn (AS1(call,%P3), xops); \
output_asm_insn (AS2(add%L0,%6,%5), xops); \
ASM_OUTPUT_INTERNAL_LABEL (STREAM, "LPBZ", num_func); \
num_func++; \
} \
while (0)
/* A C statement or compound statement to increment the count
associated with the basic block number BLOCKNO. Basic blocks are
numbered separately from zero within each compilation. The count
associated with block number BLOCKNO is at index BLOCKNO in a
vector of words; the name of this array is a local symbol made
with this statement:
ASM_GENERATE_INTERNAL_LABEL (BUFFER, "LPBX", 2);
Of course, since you are writing the definition of
`ASM_GENERATE_INTERNAL_LABEL' as well as that of this macro, you
can take a short cut in the definition of this macro and use the
name that you know will result. */
#define BLOCK_PROFILER(STREAM, BLOCKNO) \
do \
{ \
rtx xops[1], cnt_rtx; \
char counts[80]; \
\
ASM_GENERATE_INTERNAL_LABEL (counts, "LPBX", 2); \
cnt_rtx = gen_rtx (SYMBOL_REF, VOIDmode, counts); \
SYMBOL_REF_FLAG (cnt_rtx) = TRUE; \
\
if (BLOCKNO) \
cnt_rtx = plus_constant (cnt_rtx, (BLOCKNO)*4); \
\
if (flag_pic) \
cnt_rtx = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, cnt_rtx); \
\
xops[0] = gen_rtx (MEM, SImode, cnt_rtx); \
output_asm_insn (AS1(inc%L0,%0), xops); \
} \
while (0)
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
the stack pointer does not matter. The value is tested only in
functions that have frame pointers.
@ -925,36 +1148,35 @@ do { \
After reload, it makes no difference, since pseudo regs have
been eliminated by then. */
#ifndef REG_OK_STRICT
/* Nonzero if X is a hard reg that can be used as an index or if
it is a pseudo reg. */
#define REG_OK_FOR_INDEX_P(X) \
(REGNO (X) < STACK_POINTER_REGNUM \
/* Non strict versions, pseudos are ok */
#define REG_OK_FOR_INDEX_NONSTRICT_P(X) \
(REGNO (X) < STACK_POINTER_REGNUM \
|| REGNO (X) >= FIRST_PSEUDO_REGISTER)
/* Nonzero if X is a hard reg that can be used as a base reg
or if it is a pseudo reg. */
/* ?wfs */
#define REG_OK_FOR_BASE_NONSTRICT_P(X) \
(REGNO (X) <= STACK_POINTER_REGNUM \
|| REGNO (X) == ARG_POINTER_REGNUM \
|| REGNO (X) >= FIRST_PSEUDO_REGISTER)
#define REG_OK_FOR_BASE_P(X) \
(REGNO (X) <= STACK_POINTER_REGNUM \
|| REGNO (X) == ARG_POINTER_REGNUM \
|| REGNO(X) >= FIRST_PSEUDO_REGISTER)
#define REG_OK_FOR_STRREG_P(X) \
#define REG_OK_FOR_STRREG_NONSTRICT_P(X) \
(REGNO (X) == 4 || REGNO (X) == 5 || REGNO (X) >= FIRST_PSEUDO_REGISTER)
#else
/* Nonzero if X is a hard reg that can be used as an index. */
#define REG_OK_FOR_INDEX_P(X) REGNO_OK_FOR_INDEX_P (REGNO (X))
/* Nonzero if X is a hard reg that can be used as a base reg. */
#define REG_OK_FOR_BASE_P(X) REGNO_OK_FOR_BASE_P (REGNO (X))
#define REG_OK_FOR_STRREG_P(X) \
/* Strict versions, hard registers only */
#define REG_OK_FOR_INDEX_STRICT_P(X) REGNO_OK_FOR_INDEX_P (REGNO (X))
#define REG_OK_FOR_BASE_STRICT_P(X) REGNO_OK_FOR_BASE_P (REGNO (X))
#define REG_OK_FOR_STRREG_STRICT_P(X) \
(REGNO_OK_FOR_DIREG_P (REGNO (X)) || REGNO_OK_FOR_SIREG_P (REGNO (X)))
#ifndef REG_OK_STRICT
#define REG_OK_FOR_INDEX_P(X) REG_OK_FOR_INDEX_NONSTRICT_P(X)
#define REG_OK_FOR_BASE_P(X) REG_OK_FOR_BASE_NONSTRICT_P(X)
#define REG_OK_FOR_STRREG_P(X) REG_OK_FOR_STRREG_NONSTRICT_P(X)
#else
#define REG_OK_FOR_INDEX_P(X) REG_OK_FOR_INDEX_STRICT_P(X)
#define REG_OK_FOR_BASE_P(X) REG_OK_FOR_BASE_STRICT_P(X)
#define REG_OK_FOR_STRREG_P(X) REG_OK_FOR_STRREG_STRICT_P(X)
#endif
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
@ -980,62 +1202,22 @@ do { \
#define LEGITIMATE_CONSTANT_P(X) 1
#define GO_IF_INDEXABLE_BASE(X, ADDR) \
if (GET_CODE (X) == REG && REG_OK_FOR_BASE_P (X)) goto ADDR
#define LEGITIMATE_INDEX_REG_P(X) \
(GET_CODE (X) == REG && REG_OK_FOR_INDEX_P (X))
/* Return 1 if X is an index or an index times a scale. */
#define LEGITIMATE_INDEX_P(X) \
(LEGITIMATE_INDEX_REG_P (X) \
|| (GET_CODE (X) == MULT \
&& LEGITIMATE_INDEX_REG_P (XEXP (X, 0)) \
&& GET_CODE (XEXP (X, 1)) == CONST_INT \
&& (INTVAL (XEXP (X, 1)) == 2 \
|| INTVAL (XEXP (X, 1)) == 4 \
|| INTVAL (XEXP (X, 1)) == 8)))
/* Go to ADDR if X is an index term, a base reg, or a sum of those. */
#define GO_IF_INDEXING(X, ADDR) \
{ if (LEGITIMATE_INDEX_P (X)) goto ADDR; \
GO_IF_INDEXABLE_BASE (X, ADDR); \
if (GET_CODE (X) == PLUS && LEGITIMATE_INDEX_P (XEXP (X, 0))) \
{ GO_IF_INDEXABLE_BASE (XEXP (X, 1), ADDR); } \
if (GET_CODE (X) == PLUS && LEGITIMATE_INDEX_P (XEXP (X, 1))) \
{ GO_IF_INDEXABLE_BASE (XEXP (X, 0), ADDR); } }
/* We used to allow this, but it isn't ever used.
|| ((GET_CODE (X) == POST_DEC || GET_CODE (X) == POST_INC) \
&& REG_P (XEXP (X, 0)) \
&& REG_OK_FOR_STRREG_P (XEXP (X, 0))) \
*/
#define GO_IF_LEGITIMATE_ADDRESS(MODE, X, ADDR) \
#ifdef REG_OK_STRICT
#define GO_IF_LEGITIMATE_ADDRESS(MODE, X, ADDR) \
{ \
if (CONSTANT_ADDRESS_P (X) \
&& (! flag_pic || LEGITIMATE_PIC_OPERAND_P (X))) \
if (legitimate_address_p (MODE, X, 1)) \
goto ADDR; \
GO_IF_INDEXING (X, ADDR); \
if (GET_CODE (X) == PLUS && CONSTANT_ADDRESS_P (XEXP (X, 1))) \
{ \
rtx x0 = XEXP (X, 0); \
if (! flag_pic || ! SYMBOLIC_CONST (XEXP (X, 1))) \
{ GO_IF_INDEXING (x0, ADDR); } \
else if (x0 == pic_offset_table_rtx) \
goto ADDR; \
else if (GET_CODE (x0) == PLUS) \
{ \
if (XEXP (x0, 0) == pic_offset_table_rtx) \
{ GO_IF_INDEXABLE_BASE (XEXP (x0, 1), ADDR); } \
if (XEXP (x0, 1) == pic_offset_table_rtx) \
{ GO_IF_INDEXABLE_BASE (XEXP (x0, 0), ADDR); } \
} \
} \
}
#else
#define GO_IF_LEGITIMATE_ADDRESS(MODE, X, ADDR) \
{ \
if (legitimate_address_p (MODE, X, 0)) \
goto ADDR; \
}
#endif
/* Try machine-dependent ways of modifying an illegitimate address
to be legitimate. If we find one, return the new, valid address.
This macro is used in only one place: `memory_address' in explow.c.
@ -1057,38 +1239,13 @@ do { \
When -fpic is used, special handling is needed for symbolic references.
See comments by legitimize_pic_address in i386.c for details. */
#define LEGITIMIZE_ADDRESS(X,OLDX,MODE,WIN) \
{ extern rtx legitimize_pic_address (); \
int ch = (X) != (OLDX); \
if (flag_pic && SYMBOLIC_CONST (X)) \
{ \
(X) = legitimize_pic_address (X, 0); \
if (memory_address_p (MODE, X)) \
goto WIN; \
} \
if (GET_CODE (X) == PLUS) \
{ if (GET_CODE (XEXP (X, 0)) == MULT) \
ch = 1, XEXP (X, 0) = force_operand (XEXP (X, 0), 0); \
if (GET_CODE (XEXP (X, 1)) == MULT) \
ch = 1, XEXP (X, 1) = force_operand (XEXP (X, 1), 0); \
if (ch && GET_CODE (XEXP (X, 1)) == REG \
&& GET_CODE (XEXP (X, 0)) == REG) \
goto WIN; \
if (flag_pic && SYMBOLIC_CONST (XEXP (X, 1))) \
ch = 1, (X) = legitimize_pic_address (X, 0); \
if (ch) { GO_IF_LEGITIMATE_ADDRESS (MODE, X, WIN); } \
if (GET_CODE (XEXP (X, 0)) == REG) \
{ register rtx temp = gen_reg_rtx (Pmode); \
register rtx val = force_operand (XEXP (X, 1), temp); \
if (val != temp) emit_move_insn (temp, val); \
XEXP (X, 1) = temp; \
goto WIN; } \
else if (GET_CODE (XEXP (X, 1)) == REG) \
{ register rtx temp = gen_reg_rtx (Pmode); \
register rtx val = force_operand (XEXP (X, 0), temp); \
if (val != temp) emit_move_insn (temp, val); \
XEXP (X, 0) = temp; \
goto WIN; }}}
#define LEGITIMIZE_ADDRESS(X, OLDX, MODE, WIN) \
{ \
rtx orig_x = (X); \
(X) = legitimize_address (X, OLDX, MODE); \
if (memory_address_p (MODE, X)) \
goto WIN; \
}
/* Nonzero if the constant value X is a legitimate general operand
when generating PIC code. It is given that flag_pic is on and
@ -1312,7 +1469,6 @@ while (0)
stored from the compare operation. Note that we can't use "rtx" here
since it hasn't been defined! */
extern struct rtx_def *i386_compare_op0, *i386_compare_op1;
extern struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
/* Tell final.c how to eliminate redundant test instructions. */
@ -1658,6 +1814,60 @@ extern char *qi_high_reg_name[];
#define RET return ""
#define AT_SP(mode) (gen_rtx (MEM, (mode), stack_pointer_rtx))
/* Functions in i386.c */
extern void override_options ();
extern void order_regs_for_local_alloc ();
extern void output_op_from_reg ();
extern void output_to_reg ();
extern char *singlemove_string ();
extern char *output_move_double ();
extern char *output_move_memory ();
extern char *output_move_pushmem ();
extern int standard_80387_constant_p ();
extern char *output_move_const_single ();
extern int symbolic_operand ();
extern int call_insn_operand ();
extern int expander_call_insn_operand ();
extern int symbolic_reference_mentioned_p ();
extern void emit_pic_move ();
extern void function_prologue ();
extern int simple_386_epilogue ();
extern void function_epilogue ();
extern int legitimate_address_p ();
extern struct rtx_def *legitimize_pic_address ();
extern struct rtx_def *legitimize_address ();
extern void print_operand ();
extern void print_operand_address ();
extern void notice_update_cc ();
extern void split_di ();
extern int binary_387_op ();
extern int shift_op ();
extern int VOIDmode_compare_op ();
extern char *output_387_binary_op ();
extern char *output_fix_trunc ();
extern char *output_float_compare ();
extern char *output_fp_cc0_set ();
extern void save_386_machine_status ();
extern void restore_386_machine_status ();
extern void clear_386_stack_locals ();
extern struct rtx_def *assign_386_stack_local ();
/* Variables in i386.c */
extern char *i386_reg_alloc_order; /* register allocation order */
extern char *hi_reg_name[]; /* names for 16 bit regs */
extern char *qi_reg_name[]; /* names for 8 bit regs (low) */
extern char *qi_high_reg_name[]; /* names for 8 bit regs (high) */
extern enum reg_class regclass_map[]; /* smallest class containing REGNO */
extern struct rtx_def *i386_compare_op0; /* operand 0 for comparisons */
extern struct rtx_def *i386_compare_op1; /* operand 1 for comparisons */
/* External variables used */
extern int optimize; /* optimization level */
extern int obey_regdecls; /* TRUE if stupid register allocation */
/* External functions used */
extern struct rtx_def *force_operand ();
/*
Local variables:
version-control: t

View File

@ -1,5 +1,5 @@
/* Definitions for Unix assembler syntax for the Intel 80386.
Copyright (C) 1988 Free Software Foundation, Inc.
Copyright (C) 1988, 1994 Free Software Foundation, Inc.
This file is part of GNU CC.
@ -28,13 +28,15 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
/* Note that the other files fail to use these
in some of the places where they should. */
#ifdef __STDC__
#if defined(__STDC__) || defined(ALMOST_STDC)
#define AS2(a,b,c) #a " " #b "," #c
#define AS2C(b,c) " " #b "," #c
#define AS3(a,b,c,d) #a " " #b "," #c "," #d
#define AS1(a,b) #a " " #b
#else
#define AS1(a,b) "a b"
#define AS2(a,b,c) "a b,c"
#define AS2C(b,c) " b,c"
#define AS3(a,b,c,d) "a b,c,d"
#endif

View File

@ -32,169 +32,184 @@ enum insn_code {
CODE_FOR_cmpdf_ccfpeq = 40,
CODE_FOR_cmpsf_cc = 41,
CODE_FOR_cmpsf_ccfpeq = 42,
CODE_FOR_movsi = 48,
CODE_FOR_movhi = 51,
CODE_FOR_movstricthi = 52,
CODE_FOR_movqi = 54,
CODE_FOR_movstrictqi = 55,
CODE_FOR_movsf = 57,
CODE_FOR_swapdf = 59,
CODE_FOR_movdf = 60,
CODE_FOR_swapxf = 62,
CODE_FOR_movxf = 63,
CODE_FOR_movdi = 65,
CODE_FOR_zero_extendhisi2 = 66,
CODE_FOR_zero_extendqihi2 = 67,
CODE_FOR_zero_extendqisi2 = 68,
CODE_FOR_zero_extendsidi2 = 69,
CODE_FOR_extendsidi2 = 70,
CODE_FOR_extendhisi2 = 71,
CODE_FOR_extendqihi2 = 72,
CODE_FOR_extendqisi2 = 73,
CODE_FOR_extendsfdf2 = 74,
CODE_FOR_extenddfxf2 = 75,
CODE_FOR_extendsfxf2 = 76,
CODE_FOR_truncdfsf2 = 77,
CODE_FOR_truncxfsf2 = 79,
CODE_FOR_truncxfdf2 = 80,
CODE_FOR_fixuns_truncxfsi2 = 81,
CODE_FOR_fixuns_truncdfsi2 = 82,
CODE_FOR_fixuns_truncsfsi2 = 83,
CODE_FOR_fix_truncxfdi2 = 84,
CODE_FOR_fix_truncdfdi2 = 85,
CODE_FOR_fix_truncsfdi2 = 86,
CODE_FOR_fix_truncxfsi2 = 90,
CODE_FOR_fix_truncdfsi2 = 91,
CODE_FOR_fix_truncsfsi2 = 92,
CODE_FOR_floatsisf2 = 96,
CODE_FOR_floatdisf2 = 97,
CODE_FOR_floatsidf2 = 98,
CODE_FOR_floatdidf2 = 99,
CODE_FOR_floatsixf2 = 100,
CODE_FOR_floatdixf2 = 101,
CODE_FOR_adddi3 = 108,
CODE_FOR_addsi3 = 109,
CODE_FOR_addhi3 = 110,
CODE_FOR_addqi3 = 111,
CODE_FOR_addxf3 = 113,
CODE_FOR_adddf3 = 114,
CODE_FOR_addsf3 = 115,
CODE_FOR_subdi3 = 116,
CODE_FOR_subsi3 = 117,
CODE_FOR_subhi3 = 118,
CODE_FOR_subqi3 = 119,
CODE_FOR_subxf3 = 120,
CODE_FOR_subdf3 = 121,
CODE_FOR_subsf3 = 122,
CODE_FOR_mulhi3 = 124,
CODE_FOR_mulsi3 = 126,
CODE_FOR_umulqihi3 = 127,
CODE_FOR_mulqihi3 = 128,
CODE_FOR_umulsidi3 = 129,
CODE_FOR_mulsidi3 = 130,
CODE_FOR_mulxf3 = 131,
CODE_FOR_muldf3 = 132,
CODE_FOR_mulsf3 = 133,
CODE_FOR_divqi3 = 134,
CODE_FOR_udivqi3 = 135,
CODE_FOR_divxf3 = 136,
CODE_FOR_divdf3 = 137,
CODE_FOR_divsf3 = 138,
CODE_FOR_divmodsi4 = 139,
CODE_FOR_divmodhi4 = 140,
CODE_FOR_udivmodsi4 = 141,
CODE_FOR_udivmodhi4 = 142,
CODE_FOR_andsi3 = 143,
CODE_FOR_andhi3 = 144,
CODE_FOR_andqi3 = 145,
CODE_FOR_iorsi3 = 146,
CODE_FOR_iorhi3 = 147,
CODE_FOR_iorqi3 = 148,
CODE_FOR_xorsi3 = 149,
CODE_FOR_xorhi3 = 150,
CODE_FOR_xorqi3 = 151,
CODE_FOR_negdi2 = 152,
CODE_FOR_negsi2 = 153,
CODE_FOR_neghi2 = 154,
CODE_FOR_negqi2 = 155,
CODE_FOR_negsf2 = 156,
CODE_FOR_negdf2 = 157,
CODE_FOR_negxf2 = 159,
CODE_FOR_abssf2 = 161,
CODE_FOR_absdf2 = 162,
CODE_FOR_absxf2 = 164,
CODE_FOR_sqrtsf2 = 166,
CODE_FOR_sqrtdf2 = 167,
CODE_FOR_sqrtxf2 = 169,
CODE_FOR_sindf2 = 172,
CODE_FOR_sinsf2 = 173,
CODE_FOR_cosdf2 = 175,
CODE_FOR_cossf2 = 176,
CODE_FOR_one_cmplsi2 = 178,
CODE_FOR_one_cmplhi2 = 179,
CODE_FOR_one_cmplqi2 = 180,
CODE_FOR_ashldi3 = 181,
CODE_FOR_ashldi3_const_int = 182,
CODE_FOR_ashldi3_non_const_int = 183,
CODE_FOR_ashlsi3 = 184,
CODE_FOR_ashlhi3 = 185,
CODE_FOR_ashlqi3 = 186,
CODE_FOR_ashrdi3 = 187,
CODE_FOR_ashrdi3_const_int = 188,
CODE_FOR_ashrdi3_non_const_int = 189,
CODE_FOR_ashrsi3 = 190,
CODE_FOR_ashrhi3 = 191,
CODE_FOR_ashrqi3 = 192,
CODE_FOR_lshrdi3 = 193,
CODE_FOR_lshrdi3_const_int = 194,
CODE_FOR_lshrdi3_non_const_int = 195,
CODE_FOR_lshrsi3 = 196,
CODE_FOR_lshrhi3 = 197,
CODE_FOR_lshrqi3 = 198,
CODE_FOR_rotlsi3 = 199,
CODE_FOR_rotlhi3 = 200,
CODE_FOR_rotlqi3 = 201,
CODE_FOR_rotrsi3 = 202,
CODE_FOR_rotrhi3 = 203,
CODE_FOR_rotrqi3 = 204,
CODE_FOR_seq = 211,
CODE_FOR_sne = 213,
CODE_FOR_sgt = 215,
CODE_FOR_sgtu = 217,
CODE_FOR_slt = 219,
CODE_FOR_sltu = 221,
CODE_FOR_sge = 223,
CODE_FOR_sgeu = 225,
CODE_FOR_sle = 227,
CODE_FOR_sleu = 229,
CODE_FOR_beq = 231,
CODE_FOR_bne = 233,
CODE_FOR_bgt = 235,
CODE_FOR_bgtu = 237,
CODE_FOR_blt = 239,
CODE_FOR_bltu = 241,
CODE_FOR_bge = 243,
CODE_FOR_bgeu = 245,
CODE_FOR_ble = 247,
CODE_FOR_bleu = 249,
CODE_FOR_jump = 261,
CODE_FOR_indirect_jump = 262,
CODE_FOR_casesi = 263,
CODE_FOR_tablejump = 265,
CODE_FOR_call_pop = 266,
CODE_FOR_call = 269,
CODE_FOR_call_value_pop = 272,
CODE_FOR_call_value = 275,
CODE_FOR_untyped_call = 278,
CODE_FOR_untyped_return = 281,
CODE_FOR_update_return = 282,
CODE_FOR_return = 283,
CODE_FOR_nop = 284,
CODE_FOR_movstrsi = 285,
CODE_FOR_cmpstrsi = 287,
CODE_FOR_ffssi2 = 290,
CODE_FOR_ffshi2 = 292,
CODE_FOR_strlensi = 307,
CODE_FOR_movsi = 49,
CODE_FOR_movhi = 54,
CODE_FOR_movstricthi = 56,
CODE_FOR_movqi = 61,
CODE_FOR_movstrictqi = 63,
CODE_FOR_movsf = 65,
CODE_FOR_movsf_push_nomove = 66,
CODE_FOR_movsf_push = 67,
CODE_FOR_movsf_mem = 68,
CODE_FOR_movsf_normal = 69,
CODE_FOR_swapsf = 70,
CODE_FOR_movdf = 71,
CODE_FOR_movdf_push_nomove = 72,
CODE_FOR_movdf_push = 73,
CODE_FOR_movdf_mem = 74,
CODE_FOR_movdf_normal = 75,
CODE_FOR_swapdf = 76,
CODE_FOR_movxf = 77,
CODE_FOR_movxf_push_nomove = 78,
CODE_FOR_movxf_push = 79,
CODE_FOR_movxf_mem = 80,
CODE_FOR_movxf_normal = 81,
CODE_FOR_swapxf = 82,
CODE_FOR_movdi = 84,
CODE_FOR_zero_extendhisi2 = 85,
CODE_FOR_zero_extendqihi2 = 86,
CODE_FOR_zero_extendqisi2 = 87,
CODE_FOR_zero_extendsidi2 = 88,
CODE_FOR_extendsidi2 = 89,
CODE_FOR_extendhisi2 = 90,
CODE_FOR_extendqihi2 = 91,
CODE_FOR_extendqisi2 = 92,
CODE_FOR_extendsfdf2 = 93,
CODE_FOR_extenddfxf2 = 94,
CODE_FOR_extendsfxf2 = 95,
CODE_FOR_truncdfsf2 = 96,
CODE_FOR_truncxfsf2 = 98,
CODE_FOR_truncxfdf2 = 99,
CODE_FOR_fixuns_truncxfsi2 = 100,
CODE_FOR_fixuns_truncdfsi2 = 101,
CODE_FOR_fixuns_truncsfsi2 = 102,
CODE_FOR_fix_truncxfdi2 = 103,
CODE_FOR_fix_truncdfdi2 = 104,
CODE_FOR_fix_truncsfdi2 = 105,
CODE_FOR_fix_truncxfsi2 = 109,
CODE_FOR_fix_truncdfsi2 = 110,
CODE_FOR_fix_truncsfsi2 = 111,
CODE_FOR_floatsisf2 = 115,
CODE_FOR_floatdisf2 = 116,
CODE_FOR_floatsidf2 = 117,
CODE_FOR_floatdidf2 = 118,
CODE_FOR_floatsixf2 = 119,
CODE_FOR_floatdixf2 = 120,
CODE_FOR_adddi3 = 127,
CODE_FOR_addsi3 = 128,
CODE_FOR_addhi3 = 129,
CODE_FOR_addqi3 = 130,
CODE_FOR_movsi_lea = 131,
CODE_FOR_addxf3 = 132,
CODE_FOR_adddf3 = 133,
CODE_FOR_addsf3 = 134,
CODE_FOR_subdi3 = 135,
CODE_FOR_subsi3 = 136,
CODE_FOR_subhi3 = 137,
CODE_FOR_subqi3 = 138,
CODE_FOR_subxf3 = 139,
CODE_FOR_subdf3 = 140,
CODE_FOR_subsf3 = 141,
CODE_FOR_mulhi3 = 143,
CODE_FOR_mulsi3 = 145,
CODE_FOR_umulqihi3 = 146,
CODE_FOR_mulqihi3 = 147,
CODE_FOR_umulsidi3 = 148,
CODE_FOR_mulsidi3 = 149,
CODE_FOR_umulsi3_highpart = 150,
CODE_FOR_smulsi3_highpart = 151,
CODE_FOR_mulxf3 = 152,
CODE_FOR_muldf3 = 153,
CODE_FOR_mulsf3 = 154,
CODE_FOR_divqi3 = 155,
CODE_FOR_udivqi3 = 156,
CODE_FOR_divxf3 = 157,
CODE_FOR_divdf3 = 158,
CODE_FOR_divsf3 = 159,
CODE_FOR_divmodsi4 = 160,
CODE_FOR_divmodhi4 = 161,
CODE_FOR_udivmodsi4 = 162,
CODE_FOR_udivmodhi4 = 163,
CODE_FOR_andsi3 = 164,
CODE_FOR_andhi3 = 165,
CODE_FOR_andqi3 = 166,
CODE_FOR_iorsi3 = 167,
CODE_FOR_iorhi3 = 168,
CODE_FOR_iorqi3 = 169,
CODE_FOR_xorsi3 = 170,
CODE_FOR_xorhi3 = 171,
CODE_FOR_xorqi3 = 172,
CODE_FOR_negdi2 = 173,
CODE_FOR_negsi2 = 174,
CODE_FOR_neghi2 = 175,
CODE_FOR_negqi2 = 176,
CODE_FOR_negsf2 = 177,
CODE_FOR_negdf2 = 178,
CODE_FOR_negxf2 = 180,
CODE_FOR_abssf2 = 182,
CODE_FOR_absdf2 = 183,
CODE_FOR_absxf2 = 185,
CODE_FOR_sqrtsf2 = 187,
CODE_FOR_sqrtdf2 = 188,
CODE_FOR_sqrtxf2 = 190,
CODE_FOR_sindf2 = 193,
CODE_FOR_sinsf2 = 194,
CODE_FOR_cosdf2 = 196,
CODE_FOR_cossf2 = 197,
CODE_FOR_one_cmplsi2 = 199,
CODE_FOR_one_cmplhi2 = 200,
CODE_FOR_one_cmplqi2 = 201,
CODE_FOR_ashldi3 = 202,
CODE_FOR_ashldi3_const_int = 203,
CODE_FOR_ashldi3_non_const_int = 204,
CODE_FOR_ashlsi3 = 205,
CODE_FOR_ashlhi3 = 206,
CODE_FOR_ashlqi3 = 207,
CODE_FOR_ashrdi3 = 208,
CODE_FOR_ashrdi3_const_int = 209,
CODE_FOR_ashrdi3_non_const_int = 210,
CODE_FOR_ashrsi3 = 211,
CODE_FOR_ashrhi3 = 212,
CODE_FOR_ashrqi3 = 213,
CODE_FOR_lshrdi3 = 214,
CODE_FOR_lshrdi3_const_int = 215,
CODE_FOR_lshrdi3_non_const_int = 216,
CODE_FOR_lshrsi3 = 217,
CODE_FOR_lshrhi3 = 218,
CODE_FOR_lshrqi3 = 219,
CODE_FOR_rotlsi3 = 220,
CODE_FOR_rotlhi3 = 221,
CODE_FOR_rotlqi3 = 222,
CODE_FOR_rotrsi3 = 223,
CODE_FOR_rotrhi3 = 224,
CODE_FOR_rotrqi3 = 225,
CODE_FOR_seq = 232,
CODE_FOR_sne = 234,
CODE_FOR_sgt = 236,
CODE_FOR_sgtu = 238,
CODE_FOR_slt = 240,
CODE_FOR_sltu = 242,
CODE_FOR_sge = 244,
CODE_FOR_sgeu = 246,
CODE_FOR_sle = 248,
CODE_FOR_sleu = 250,
CODE_FOR_beq = 252,
CODE_FOR_bne = 254,
CODE_FOR_bgt = 256,
CODE_FOR_bgtu = 258,
CODE_FOR_blt = 260,
CODE_FOR_bltu = 262,
CODE_FOR_bge = 264,
CODE_FOR_bgeu = 266,
CODE_FOR_ble = 268,
CODE_FOR_bleu = 270,
CODE_FOR_jump = 282,
CODE_FOR_indirect_jump = 283,
CODE_FOR_casesi = 284,
CODE_FOR_tablejump = 286,
CODE_FOR_call_pop = 287,
CODE_FOR_call = 290,
CODE_FOR_call_value_pop = 293,
CODE_FOR_call_value = 296,
CODE_FOR_untyped_call = 299,
CODE_FOR_blockage = 300,
CODE_FOR_return = 301,
CODE_FOR_nop = 302,
CODE_FOR_movstrsi = 303,
CODE_FOR_cmpstrsi = 305,
CODE_FOR_ffssi2 = 308,
CODE_FOR_ffshi2 = 310,
CODE_FOR_strlensi = 325,
CODE_FOR_nothing };
#define MAX_INSN_CODE ((int) CODE_FOR_nothing)

View File

@ -36,10 +36,23 @@ from the machine description file `md'. */
#define HAVE_movqi 1
#define HAVE_movstrictqi 1
#define HAVE_movsf 1
#define HAVE_swapdf 1
#define HAVE_movsf_push_nomove (!TARGET_MOVE)
#define HAVE_movsf_push 1
#define HAVE_movsf_mem 1
#define HAVE_movsf_normal ((!TARGET_MOVE || GET_CODE (operands[0]) != MEM) || (GET_CODE (operands[1]) != MEM))
#define HAVE_swapsf 1
#define HAVE_movdf 1
#define HAVE_swapxf 1
#define HAVE_movdf_push_nomove (!TARGET_MOVE)
#define HAVE_movdf_push 1
#define HAVE_movdf_mem 1
#define HAVE_movdf_normal ((!TARGET_MOVE || GET_CODE (operands[0]) != MEM) || (GET_CODE (operands[1]) != MEM))
#define HAVE_swapdf 1
#define HAVE_movxf 1
#define HAVE_movxf_push_nomove (!TARGET_MOVE)
#define HAVE_movxf_push 1
#define HAVE_movxf_mem 1
#define HAVE_movxf_normal ((!TARGET_MOVE || GET_CODE (operands[0]) != MEM) || (GET_CODE (operands[1]) != MEM))
#define HAVE_swapxf 1
#define HAVE_movdi 1
#define HAVE_zero_extendhisi2 1
#define HAVE_zero_extendqihi2 1
@ -74,6 +87,7 @@ from the machine description file `md'. */
#define HAVE_addsi3 1
#define HAVE_addhi3 1
#define HAVE_addqi3 1
#define HAVE_movsi_lea 1
#define HAVE_addxf3 (TARGET_80387)
#define HAVE_adddf3 (TARGET_80387)
#define HAVE_addsf3 (TARGET_80387)
@ -88,8 +102,10 @@ from the machine description file `md'. */
#define HAVE_mulsi3 1
#define HAVE_umulqihi3 1
#define HAVE_mulqihi3 1
#define HAVE_umulsidi3 1
#define HAVE_mulsidi3 1
#define HAVE_umulsidi3 (TARGET_WIDE_MULTIPLY)
#define HAVE_mulsidi3 (TARGET_WIDE_MULTIPLY)
#define HAVE_umulsi3_highpart (TARGET_WIDE_MULTIPLY)
#define HAVE_smulsi3_highpart (TARGET_WIDE_MULTIPLY)
#define HAVE_mulxf3 (TARGET_80387)
#define HAVE_muldf3 (TARGET_80387)
#define HAVE_mulsf3 (TARGET_80387)
@ -191,8 +207,7 @@ from the machine description file `md'. */
#define HAVE_call_value_pop 1
#define HAVE_call_value 1
#define HAVE_untyped_call 1
#define HAVE_untyped_return 1
#define HAVE_update_return 1
#define HAVE_blockage 1
#define HAVE_return (simple_386_epilogue ())
#define HAVE_nop 1
#define HAVE_movstrsi 1
@ -236,10 +251,23 @@ extern rtx gen_movstricthi PROTO((rtx, rtx));
extern rtx gen_movqi PROTO((rtx, rtx));
extern rtx gen_movstrictqi PROTO((rtx, rtx));
extern rtx gen_movsf PROTO((rtx, rtx));
extern rtx gen_swapdf PROTO((rtx, rtx));
extern rtx gen_movsf_push_nomove PROTO((rtx, rtx));
extern rtx gen_movsf_push PROTO((rtx, rtx));
extern rtx gen_movsf_mem PROTO((rtx, rtx));
extern rtx gen_movsf_normal PROTO((rtx, rtx));
extern rtx gen_swapsf PROTO((rtx, rtx));
extern rtx gen_movdf PROTO((rtx, rtx));
extern rtx gen_swapxf PROTO((rtx, rtx));
extern rtx gen_movdf_push_nomove PROTO((rtx, rtx));
extern rtx gen_movdf_push PROTO((rtx, rtx));
extern rtx gen_movdf_mem PROTO((rtx, rtx));
extern rtx gen_movdf_normal PROTO((rtx, rtx));
extern rtx gen_swapdf PROTO((rtx, rtx));
extern rtx gen_movxf PROTO((rtx, rtx));
extern rtx gen_movxf_push_nomove PROTO((rtx, rtx));
extern rtx gen_movxf_push PROTO((rtx, rtx));
extern rtx gen_movxf_mem PROTO((rtx, rtx));
extern rtx gen_movxf_normal PROTO((rtx, rtx));
extern rtx gen_swapxf PROTO((rtx, rtx));
extern rtx gen_movdi PROTO((rtx, rtx));
extern rtx gen_zero_extendhisi2 PROTO((rtx, rtx));
extern rtx gen_zero_extendqihi2 PROTO((rtx, rtx));
@ -274,6 +302,7 @@ extern rtx gen_adddi3 PROTO((rtx, rtx, rtx));
extern rtx gen_addsi3 PROTO((rtx, rtx, rtx));
extern rtx gen_addhi3 PROTO((rtx, rtx, rtx));
extern rtx gen_addqi3 PROTO((rtx, rtx, rtx));
extern rtx gen_movsi_lea PROTO((rtx, rtx));
extern rtx gen_addxf3 PROTO((rtx, rtx, rtx));
extern rtx gen_adddf3 PROTO((rtx, rtx, rtx));
extern rtx gen_addsf3 PROTO((rtx, rtx, rtx));
@ -290,6 +319,8 @@ extern rtx gen_umulqihi3 PROTO((rtx, rtx, rtx));
extern rtx gen_mulqihi3 PROTO((rtx, rtx, rtx));
extern rtx gen_umulsidi3 PROTO((rtx, rtx, rtx));
extern rtx gen_mulsidi3 PROTO((rtx, rtx, rtx));
extern rtx gen_umulsi3_highpart PROTO((rtx, rtx, rtx));
extern rtx gen_smulsi3_highpart PROTO((rtx, rtx, rtx));
extern rtx gen_mulxf3 PROTO((rtx, rtx, rtx));
extern rtx gen_muldf3 PROTO((rtx, rtx, rtx));
extern rtx gen_mulsf3 PROTO((rtx, rtx, rtx));
@ -380,8 +411,7 @@ extern rtx gen_indirect_jump PROTO((rtx));
extern rtx gen_casesi PROTO((rtx, rtx, rtx, rtx, rtx));
extern rtx gen_tablejump PROTO((rtx, rtx));
extern rtx gen_untyped_call PROTO((rtx, rtx, rtx));
extern rtx gen_untyped_return PROTO((rtx, rtx));
extern rtx gen_update_return PROTO((rtx));
extern rtx gen_blockage PROTO((void));
extern rtx gen_return PROTO((void));
extern rtx gen_nop PROTO((void));
extern rtx gen_movstrsi PROTO((rtx, rtx, rtx, rtx));
@ -438,10 +468,23 @@ extern rtx gen_movstricthi ();
extern rtx gen_movqi ();
extern rtx gen_movstrictqi ();
extern rtx gen_movsf ();
extern rtx gen_swapdf ();
extern rtx gen_movsf_push_nomove ();
extern rtx gen_movsf_push ();
extern rtx gen_movsf_mem ();
extern rtx gen_movsf_normal ();
extern rtx gen_swapsf ();
extern rtx gen_movdf ();
extern rtx gen_swapxf ();
extern rtx gen_movdf_push_nomove ();
extern rtx gen_movdf_push ();
extern rtx gen_movdf_mem ();
extern rtx gen_movdf_normal ();
extern rtx gen_swapdf ();
extern rtx gen_movxf ();
extern rtx gen_movxf_push_nomove ();
extern rtx gen_movxf_push ();
extern rtx gen_movxf_mem ();
extern rtx gen_movxf_normal ();
extern rtx gen_swapxf ();
extern rtx gen_movdi ();
extern rtx gen_zero_extendhisi2 ();
extern rtx gen_zero_extendqihi2 ();
@ -476,6 +519,7 @@ extern rtx gen_adddi3 ();
extern rtx gen_addsi3 ();
extern rtx gen_addhi3 ();
extern rtx gen_addqi3 ();
extern rtx gen_movsi_lea ();
extern rtx gen_addxf3 ();
extern rtx gen_adddf3 ();
extern rtx gen_addsf3 ();
@ -492,6 +536,8 @@ extern rtx gen_umulqihi3 ();
extern rtx gen_mulqihi3 ();
extern rtx gen_umulsidi3 ();
extern rtx gen_mulsidi3 ();
extern rtx gen_umulsi3_highpart ();
extern rtx gen_smulsi3_highpart ();
extern rtx gen_mulxf3 ();
extern rtx gen_muldf3 ();
extern rtx gen_mulsf3 ();
@ -582,8 +628,7 @@ extern rtx gen_indirect_jump ();
extern rtx gen_casesi ();
extern rtx gen_tablejump ();
extern rtx gen_untyped_call ();
extern rtx gen_untyped_return ();
extern rtx gen_update_return ();
extern rtx gen_blockage ();
extern rtx gen_return ();
extern rtx gen_nop ();
extern rtx gen_movstrsi ();

View File

@ -67,7 +67,7 @@ DEF_MACHMODE (HImode, "HI", MODE_INT, 2, 2, SImode)
/* Pointers on some machines use this type to distinguish them from ints.
Useful if a pointer is 4 bytes but has some bits that are not significant,
so it is really not quite as wide as an integer. */
DEF_MACHMODE (PSImode, "PSI", MODE_PARTIAL_INT, 4, 4, VOIDmode)
DEF_MACHMODE (PSImode, "PSI", MODE_PARTIAL_INT, 4, 4, PDImode)
DEF_MACHMODE (SImode, "SI", MODE_INT, 4, 4, DImode)
DEF_MACHMODE (PDImode, "PDI", MODE_PARTIAL_INT, 8, 8, VOIDmode)
DEF_MACHMODE (DImode, "DI", MODE_INT, 8, 8, TImode)

View File

@ -119,7 +119,7 @@ break the ordinary 'growth' macro.
but in traditional C it is usually long. If we are in ANSI C and
don't already have ptrdiff_t get it. */
#if defined (__STDC__) && ! defined (offsetof)
#if defined (__STDC__) && __STDC__ && ! defined (offsetof)
#if defined (__GNUC__) && defined (IN_GCC)
/* On Next machine, the system's stddef.h screws up if included
after we have defined just ptrdiff_t, so include all of stddef.h.
@ -132,7 +132,7 @@ break the ordinary 'growth' macro.
#include <stddef.h>
#endif
#ifdef __STDC__
#if defined (__STDC__) && __STDC__
#define PTR_INT_TYPE ptrdiff_t
#else
#define PTR_INT_TYPE long
@ -167,7 +167,7 @@ struct obstack /* control current object in current chunk */
/* Declare the external functions we use; they are in obstack.c. */
#ifdef __STDC__
#if defined (__STDC__) && __STDC__
extern void _obstack_newchunk (struct obstack *, int);
extern void _obstack_free (struct obstack *, void *);
extern int _obstack_begin (struct obstack *, int, int,
@ -181,7 +181,7 @@ extern int _obstack_begin ();
extern int _obstack_begin_1 ();
#endif
#ifdef __STDC__
#if defined (__STDC__) && __STDC__
/* Do the function-declarations after the structs
but before defining the macros. */
@ -492,7 +492,7 @@ __extension__ \
(h)->object_base = (h)->next_free, \
__INT_TO_PTR ((h)->temp)))
#ifdef __STDC__
#if defined (__STDC__) && __STDC__
#define obstack_free(h,obj) \
( (h)->temp = (char *)(obj) - (char *) (h)->chunk, \
(((h)->temp > 0 && (h)->temp < (h)->chunk_limit - (char *) (h)->chunk)\

View File

@ -1,5 +1,5 @@
/* Define per-register tables for data flow info and register allocation.
Copyright (C) 1987, 1993 Free Software Foundation, Inc.
Copyright (C) 1987, 1993, 1994 Free Software Foundation, Inc.
This file is part of GNU CC.
@ -53,6 +53,12 @@ extern short *reg_n_sets;
extern short *reg_n_deaths;
/* Indexed by N; says whether a pseudo register N was ever used
within a SUBREG that changes the size of the reg. Some machines prohibit
such objects from being in certain (usually floating-point) registers. */
extern char *reg_changes_size;
/* Get the number of consecutive words required to hold pseudo-reg N. */
#define PSEUDO_REGNO_SIZE(N) \

View File

@ -216,9 +216,10 @@ extern void mark_home_live PROTO((int));
replacement (such as sp), plus an offset. */
extern rtx eliminate_regs PROTO((rtx, enum machine_mode, rtx));
/* Emit code to perform an input reload of IN to RELOADREG. IN is from
operand OPNUM with reload type TYPE. */
extern rtx gen_input_reload PROTO((rtx, rtx, int, enum reload_type));
/* Emit code to perform a reload from IN (which may be a reload register) to
OUT (which may also be a reload register). IN or OUT is from operand
OPNUM with reload type TYPE. */
extern rtx gen_reload PROTO((rtx, rtx, int, enum reload_type));
/* Functions in caller-save.c: */

View File

@ -922,6 +922,10 @@ extern rtx output_constant_def PROTO((union tree_node *));
extern rtx immed_real_const PROTO((union tree_node *));
extern union tree_node *make_tree PROTO((union tree_node *, rtx));
/* Abort routines */
extern void fatal_insn_not_found PROTO((rtx));
extern void fatal_insn PROTO((char *, rtx));
/* Define a default value for STORE_FLAG_VALUE. */
#ifndef STORE_FLAG_VALUE

View File

@ -33,21 +33,17 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
#undef CPP_PREDEFINES
#define CPP_PREDEFINES "-Dunix -Di386 -D__FreeBSD__=2 -D__386BSD__ -Asystem(unix) -Asystem(FreeBSD) -Acpu(i386) -Amachine(i386)"
#if 0
#define INCLUDE_DEFAULTS { \
{ "/usr/include", 0 }, \
{ "/usr/include/g++", 1 }, \
{ 0, 0} \
}
#define ASM_SPEC " %| %{fpic:-k} %{fPIC:-k}"
#endif
/* Like the default, except no -lg. */
#define LIB_SPEC "%{!p:%{!pg:-lc}}%{p:-lc_p}%{pg:-lc_p}"
#define LINK_SPEC \
"%{!nostdlib:%{!r*:%{!e*:-e start}}} -dc -dp %{static:-Bstatic} %{assert*} \
%{p:-Bstatic} %{pg:-Bstatic} %{Z}"
#undef SIZE_TYPE
#define SIZE_TYPE "unsigned int"
@ -55,12 +51,12 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
#define PTRDIFF_TYPE "int"
#undef WCHAR_TYPE
#define WCHAR_TYPE "int"
#define WCHAR_TYPE "short unsigned int"
#define WCHAR_UNSIGNED 0
#define WCHAR_UNSIGNED 1
#undef WCHAR_TYPE_SIZE
#define WCHAR_TYPE_SIZE BITS_PER_WORD
#define WCHAR_TYPE_SIZE 16
#define HAVE_ATEXIT
@ -74,9 +70,16 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
#define FUNCTION_PROFILER(FILE, LABELNO) \
{ \
if (flag_pic) \
fprintf (FILE, "\tcall *mcount@GOT(%%ebx)\n"); \
{ \
fprintf (FILE, "\tleal %sP%d@GOTOFF(%%ebx),%%eax\n", \
LPREFIX, (LABELNO)); \
fprintf (FILE, "\tcall *mcount@GOT(%%ebx)\n"); \
} \
else \
fprintf (FILE, "\tcall mcount\n"); \
{ \
fprintf (FILE, "\tmovl $%sP%d,%%eax\n", LPREFIX, (LABELNO)); \
fprintf (FILE, "\tcall mcount\n"); \
} \
}
#if 0 /* not ready for this; it should be decided at compile time */
@ -183,7 +186,7 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
size_directive_output = 0; \
if (!flag_inhibit_size_directive && DECL_SIZE (DECL)) \
{ \
size_directive_output = 1; \
size_directive_output = 1; \
fprintf (FILE, "\t%s\t ", SIZE_ASM_OP); \
assemble_name (FILE, NAME); \
fprintf (FILE, ",%d\n", int_size_in_bytes (TREE_TYPE (DECL))); \
@ -197,20 +200,21 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
size_directive_output was set
by ASM_DECLARE_OBJECT_NAME when it was run for the same decl. */
#define ASM_FINISH_DECLARE_OBJECT(FILE, DECL, TOP_LEVEL, AT_END) \
do { \
char *name = XSTR (XEXP (DECL_RTL (DECL), 0), 0); \
if (!flag_inhibit_size_directive && DECL_SIZE (DECL) \
&& ! AT_END && TOP_LEVEL \
&& DECL_INITIAL (DECL) == error_mark_node \
&& !size_directive_output) \
{ \
fprintf (FILE, "\t%s\t ", SIZE_ASM_OP); \
assemble_name (FILE, name); \
fprintf (FILE, ",%d\n", int_size_in_bytes (TREE_TYPE (DECL))); \
} \
#define ASM_FINISH_DECLARE_OBJECT(FILE, DECL, TOP_LEVEL, AT_END) \
do { \
char *name = XSTR (XEXP (DECL_RTL (DECL), 0), 0); \
if (!flag_inhibit_size_directive && DECL_SIZE (DECL) \
&& ! AT_END && TOP_LEVEL \
&& DECL_INITIAL (DECL) == error_mark_node \
&& !size_directive_output) \
{ \
fprintf (FILE, "\t%s\t ", SIZE_ASM_OP); \
assemble_name (FILE, name); \
fprintf (FILE, ",%d\n", int_size_in_bytes (TREE_TYPE (DECL)));\
} \
} while (0)
/* This is how to declare the size of a function. */
#define ASM_DECLARE_FUNCTION_SIZE(FILE, FNAME, DECL) \
@ -232,82 +236,9 @@ do { \
} \
} while (0)
/* This section copied from i386/osfrose.h */
/* A C statement or compound statement to output to FILE some
assembler code to initialize basic-block profiling for the current
object module. This code should call the subroutine
`__bb_init_func' once per object module, passing it as its sole
argument the address of a block allocated in the object module.
The name of the block is a local symbol made with this statement:
ASM_GENERATE_INTERNAL_LABEL (BUFFER, "LPBX", 0);
Of course, since you are writing the definition of
`ASM_GENERATE_INTERNAL_LABEL' as well as that of this macro, you
can take a short cut in the definition of this macro and use the
name that you know will result.
The first word of this block is a flag which will be nonzero if the
object module has already been initialized. So test this word
first, and do not call `__bb_init_func' if the flag is nonzero. */
#undef FUNCTION_BLOCK_PROFILER
#define FUNCTION_BLOCK_PROFILER(STREAM, LABELNO) \
do \
{ \
if (!flag_pic) \
{ \
fprintf (STREAM, "\tcmpl $0,%sPBX0\n", LPREFIX); \
fprintf (STREAM, "\tjne 0f\n"); \
fprintf (STREAM, "\tpushl $%sPBX0\n", LPREFIX); \
fprintf (STREAM, "\tcall ___bb_init_func\n"); \
fprintf (STREAM, "0:\n"); \
} \
else \
{ \
fprintf (STREAM, "\tpushl %eax\n"); \
fprintf (STREAM, "\tmovl %sPBX0@GOT(%ebx),%eax\n"); \
fprintf (STREAM, "\tcmpl $0,(%eax)\n"); \
fprintf (STREAM, "\tjne 0f\n"); \
fprintf (STREAM, "\tpushl %eax\n"); \
fprintf (STREAM, "\tcall ___bb_init_func@PLT\n"); \
fprintf (STREAM, "0:\n"); \
fprintf (STREAM, "\tpopl %eax\n"); \
} \
} \
while (0)
/* A C statement or compound statement to increment the count
associated with the basic block number BLOCKNO. Basic blocks are
numbered separately from zero within each compilation. The count
associated with block number BLOCKNO is at index BLOCKNO in a
vector of words; the name of this array is a local symbol made
with this statement:
ASM_GENERATE_INTERNAL_LABEL (BUFFER, "LPBX", 2);
Of course, since you are writing the definition of
`ASM_GENERATE_INTERNAL_LABEL' as well as that of this macro, you
can take a short cut in the definition of this macro and use the
name that you know will result. */
#undef BLOCK_PROFILER
#define BLOCK_PROFILER(STREAM, BLOCKNO) \
do \
{ \
if (!flag_pic) \
fprintf (STREAM, "\tincl %sPBX2+%d\n", LPREFIX, (BLOCKNO)*4); \
else \
{ \
fprintf (STREAM, "\tpushl %eax\n"); \
fprintf (STREAM, "\tmovl %sPBX2@GOT(%ebx),%eax\n", LPREFIX); \
fprintf (STREAM, "\tincl %d(%eax)\n", (BLOCKNO)*4); \
fprintf (STREAM, "\tpopl %eax\n"); \
} \
} \
while (0)
#define ASM_SPEC " %| %{fpic:-k} %{fPIC:-k}"
#define LINK_SPEC \
"%{!nostdlib:%{!r*:%{!e*:-e start}}} -dc -dp %{static:-Bstatic} %{assert*}"
/* This is defined when gcc is compiled in the BSD-directory-tree, and must
* make up for the gap to all the stuff done in the GNU-makefiles.

View File

@ -619,6 +619,10 @@ struct tree_block
function when they are created. */
#define TYPE_NEEDS_CONSTRUCTING(NODE) ((NODE)->type.needs_constructing_flag)
/* Indicates that objects of this type (a UNION_TYPE), should be passed
the same way that the first union alternative would be passed. */
#define TYPE_TRANSPARENT_UNION(NODE) ((NODE)->type.transparent_union_flag)
struct tree_type
{
char common[sizeof (struct tree_common)];
@ -637,6 +641,7 @@ struct tree_type
unsigned string_flag : 1;
unsigned no_force_blk_flag : 1;
unsigned needs_constructing_flag : 1;
unsigned transparent_union_flag : 1;
unsigned lang_flag_0 : 1;
unsigned lang_flag_1 : 1;
unsigned lang_flag_2 : 1;
@ -644,7 +649,7 @@ struct tree_type
unsigned lang_flag_4 : 1;
unsigned lang_flag_5 : 1;
unsigned lang_flag_6 : 1;
/* room for 6 more bits */
/* room for 5 more bits */
unsigned int align;
union tree_node *pointer_to;
@ -928,6 +933,11 @@ struct tree_type
so it should not be output now. */
#define DECL_DEFER_OUTPUT(NODE) ((NODE)->decl.defer_output)
/* Used in PARM_DECLs whose type are unions to indicate that the
argument should be passed in the same way that the first union
alternative would be passed. */
#define DECL_TRANSPARENT_UNION(NODE) ((NODE)->decl.transparent_union)
/* Additional flags for language-specific uses. */
#define DECL_LANG_FLAG_0(NODE) ((NODE)->decl.lang_flag_0)
#define DECL_LANG_FLAG_1(NODE) ((NODE)->decl.lang_flag_1)
@ -963,7 +973,8 @@ struct tree_decl
unsigned in_system_header_flag : 1;
unsigned common_flag : 1;
unsigned defer_output : 1;
/* room for five more */
unsigned transparent_union : 1;
/* room for four more */
unsigned lang_flag_0 : 1;
unsigned lang_flag_1 : 1;
@ -1238,6 +1249,12 @@ extern tree get_pending_sizes PROTO((void));
extern tree sizetype;
/* If nonzero, an upper limit on alignment of structure fields, in bits. */
extern int maximum_field_alignment;
/* If non-zero, the alignment of a bitstring or (power-)set value, in bits. */
extern int set_alignment;
/* Concatenate two lists (chains of TREE_LIST nodes) X and Y
by making the last node in X point to Y.
Returns X, except if X is 0 returns Y. */
@ -1481,6 +1498,9 @@ extern void (*incomplete_decl_finalize_hook) ();
/* In tree.c */
extern char *perm_calloc PROTO((int, long));
extern tree get_set_constructor_bits PROTO((tree, char*, int));
extern tree get_set_constructor_words PROTO((tree,
HOST_WIDE_INT*, int));
/* In stmt.c */

File diff suppressed because it is too large Load Diff

View File

@ -5,10 +5,10 @@
LIB= gcc
INSTALL_PIC_ARCHIVE= yes
SHLIB_MAJOR= 26
SHLIB_MINOR= 0
SHLIB_MINOR= 1
LIB1OBJS= _mulsi3.o _udivsi3.o _divsi3.o _umodsi3.o _modsi3.o _lshrsi3.o _lshlsi3.o _ashrsi3.o _ashlsi3.o _divdf3.o _muldf3.o _negdf2.o _adddf3.o _subdf3.o _fixdfsi.o _fixsfsi.o _floatsidf.o _floatsisf.o _truncdfsf2.o _extendsfdf2.o _addsf3.o _negsf2.o _subsf3.o _mulsf3.o _divsf3.o _eqdf2.o _nedf2.o _gtdf2.o _gedf2.o _ltdf2.o _ledf2.o _eqsf2.o _nesf2.o _gtsf2.o _gesf2.o _ltsf2.o _lesf2.o
LIB2OBJS= _muldi3.o _divdi3.o _moddi3.o _udivdi3.o _umoddi3.o _negdi2.o _lshrdi3.o _lshldi3.o _ashldi3.o _ashrdi3.o _ffsdi2.o _udiv_w_sdiv.o _udivmoddi4.o _cmpdi2.o _ucmpdi2.o _floatdidf.o _floatdisf.o _fixunsdfsi.o _fixunssfsi.o _fixunsdfdi.o _fixdfdi.o _fixunssfdi.o _fixsfdi.o _fixxfdi.o _fixunsxfdi.o _floatdixf.o _fixunsxfsi.o _fixtfdi.o _fixunstfdi.o _floatditf.o __gcc_bcmp.o _varargs.o _eprintf.o _op_new.o _op_vnew.o _new_handler.o _op_delete.o _op_vdel.o _bb.o _shtab.o _clear_cache.o _trampoline.o __main.o _exit.o _ctors.o
LIB2OBJS= _muldi3.o _divdi3.o _moddi3.o _udivdi3.o _umoddi3.o _negdi2.o _lshrdi3.o _lshldi3.o _ashldi3.o _ashrdi3.o _ffsdi2.o _udiv_w_sdiv.o _udivmoddi4.o _cmpdi2.o _ucmpdi2.o _floatdidf.o _floatdisf.o _fixunsdfsi.o _fixunssfsi.o _fixunsdfdi.o _fixdfdi.o _fixunssfdi.o _fixsfdi.o _fixxfdi.o _fixunsxfdi.o _floatdixf.o _fixunsxfsi.o _fixtfdi.o _fixunstfdi.o _floatditf.o __gcc_bcmp.o _varargs.o _eprintf.o _op_new.o _op_vnew.o _new_handler.o _op_delete.o _op_vdel.o _bb.o _shtab.o _clear_cache.o _trampoline.o __main.o _exit.o _ctors.o _eh.o _pure.o
OBJS= ${LIB1OBJS} ${LIB2OBJS}
LIB1SOBJS=${LIB1OBJS:.o=.so}

View File

@ -1428,6 +1428,7 @@ BLOCK_PROFILER_CODE
#undef NULL /* Avoid errors if stdio.h and our stddef.h mismatch. */
#include <stdio.h>
char *ctime ();
#ifdef HAVE_ATEXIT
extern void atexit (void (*) (void));
@ -1995,21 +1996,9 @@ __do_global_dtors ()
#ifdef DO_GLOBAL_DTORS_BODY
DO_GLOBAL_DTORS_BODY;
#else
unsigned nptrs = (unsigned HOST_WIDE_INT) __DTOR_LIST__[0];
unsigned i;
/* Some systems place the number of pointers
in the first word of the table.
On other systems, that word is -1.
In all cases, the table is null-terminated. */
/* If the length is not recorded, count up to the null. */
if (nptrs == -1)
for (nptrs = 0; __DTOR_LIST__[nptrs + 1] != 0; nptrs++);
/* GNU LD format. */
for (i = nptrs; i >= 1; i--)
__DTOR_LIST__[i] ();
func_ptr *p;
for (p = __DTOR_LIST__ + 1; *p; )
(*p++) ();
#endif
}
@ -2149,3 +2138,162 @@ func_ptr __CTOR_LIST__[2];
#include "gbl-ctors.h"
func_ptr __DTOR_LIST__[2];
#endif
#ifdef L_eh
/* Support for C++ exception handling (library member L_eh).

   The compiler emits, per object file, a table of code-address ranges,
   each paired with the exception handler covering that range.  A table
   is an array of exception_table entries: entry 0 is a header slot,
   entries 1..n describe ranges, and the array is terminated by an
   entry whose fields are all (void *)-1.  */

typedef struct {
  void *start;			/* First PC covered by this range.  */
  void *end;			/* One past the last PC covered.  */
  void *exception_handler;	/* Handler for throws in [start, end).  */
} exception_table;

/* One registered table, kept on a singly linked list together with the
   union of its ranges so a whole table can be rejected with just two
   comparisons.  */
struct exception_table_node {
  exception_table *table;
  void *start;			/* Smallest range start in TABLE.  */
  void *end;			/* Largest range end in TABLE.  */
  struct exception_table_node *next;
};

/* Index of the last matching table entry.  Not written by the active
   search path; retained so this module's data layout is unchanged.  */
static int except_table_pos = 0;
/* PC most recently searched for (cached for debugging).  */
static void *except_pc = (void *)0;
/* Head of the list of registered exception tables.  */
static struct exception_table_node *exception_table_list = 0;

/* Return the registered table whose overall [start, end) span contains
   PC, or 0 if no registered table covers PC.  */
static exception_table *
find_exception_table (pc)
     void* pc;
{
  register struct exception_table_node *table = exception_table_list;
  for ( ; table != 0; table = table->next)
    {
      if (table->start <= pc && table->end > pc)
	return table->table;
    }
  return 0;
}

/* Given a PC, return the exception handler of the tightest table entry
   containing it, or 0 if no registered range contains PC.  When several
   ranges contain PC the innermost one wins -- the one that starts last
   and ends first -- since it corresponds to the innermost source block.

   If tables were known to be sorted in ascending order of START we
   could stop at the first entry whose start exceeds PC; they are not,
   so every entry is examined.  */
void *
__find_first_exception_table_match(pc)
void *pc;
{
  exception_table *table = find_exception_table (pc);
  int pos = 0;
  int best = 0;

  if (table == 0)
    return (void*)0;

  except_pc = pc;

  /* Entry 0 is the header slot, so the scan starts at entry 1; the
     table is terminated by an exception_handler of (void *)-1.  */
  while (table[++pos].exception_handler != (void*)-1) {
    if (table[pos].start <= except_pc && table[pos].end > except_pc)
      {
	/* This entry applies.  Keep it when it is at least as tight as
	   the best so far: it ends no later and starts no earlier.
	   Ties go to the later entry, which represents an inner
	   block.  */
	if (best == 0 || (table[pos].end <= table[best].end
			  && table[pos].start >= table[best].start))
	  best = pos;
      }
  }
  if (best != 0)
    return table[best].exception_handler;

  return (void*)0;
}

/* Return 0 when the type named CATCH_TYPE matches the type named
   THROW_TYPE, nonzero otherwise.  Type identity here is name identity,
   so a plain string comparison suffices.  */
int
__throw_type_match (const char *catch_type, const char *throw_type)
{
  return strcmp (catch_type, throw_type);
}

/* Add TABLE to the list of registered exception tables.  TABLE[0] is a
   header slot; the ranges begin at TABLE[1] and end with an entry whose
   start is (void *)-1.  The list node caches the union of all ranges --
   minimum start, maximum end -- for find_exception_table.  */
void
__register_exceptions (exception_table *table)
{
  struct exception_table_node *node = (struct exception_table_node*)
    malloc (sizeof (struct exception_table_node));
  exception_table *range = table + 1;

  /* NOTE(review): malloc is unchecked, as elsewhere in this file; an
     out-of-memory at registration time will fault just below.  */
  node->table = table;

  /* This loop can be optimized away either if the table
     is sorted, or if we pass in extra parameters.  */
  node->start = range->start;
  node->end = range->end;
  for (range++ ; range->start != (void*)(-1); range++)
    {
      if (range->start < node->start)
	node->start = range->start;
      /* Track the LARGEST end so the node spans every range.  The
	 previous code compared with `<', recording the smallest end,
	 which made find_exception_table reject PCs that fall inside
	 any longer range -- their handlers were never found.  */
      if (range->end > node->end)
	node->end = range->end;
    }

  node->next = exception_table_list;
  exception_table_list = node;
}
#endif /* L_eh */
#ifdef L_pure
#define MESSAGE "pure virtual method called\n"

/* Called through a vtable slot that was never overridden: report the
   error and terminate.  The diagnostic goes straight to file
   descriptor 2 (stderr) via write(); stdio is deliberately avoided
   here, and _exit skips atexit/stdio cleanup on the way out.  */
void
__pure_virtual ()
{
  static const char msg[] = MESSAGE;

  write (2, msg, sizeof msg - 1);
  _exit (-1);
}
#endif