1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
/* Integer division of X by Y, rounding up (ceiling).
   NOTE(review): both arguments are evaluated more than once -- do not
   pass expressions with side effects.  */
30 #define CEIL(x,y) (((x) + (y) - 1) / (y))
32 /* Return nonzero if REF is an lvalue valid for this language.
33 Lvalues can be assigned, unless they have TREE_READONLY.
34 Lvalues can have their address taken, unless they have DECL_REGISTER. */
40 if (! language_lvalue_valid (ref))
43 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
46 if (ref == current_class_decl && flag_this_is_variable <= 0)
49 switch (TREE_CODE (ref))
51 /* preincrements and predecrements are valid lvals, provided
52 what they refer to are valid lvals. */
53 case PREINCREMENT_EXPR:
54 case PREDECREMENT_EXPR:
57 return real_lvalue_p (TREE_OPERAND (ref, 0));
63 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
64 && DECL_LANG_SPECIFIC (ref)
65 && DECL_IN_AGGR_P (ref))
72 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
73 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
77 case WITH_CLEANUP_EXPR:
78 return real_lvalue_p (TREE_OPERAND (ref, 0));
80 /* A currently unresolved scope ref. */
82 my_friendly_abort (103);
84 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
86 return real_lvalue_p (TREE_OPERAND (ref, 0))
87 && real_lvalue_p (TREE_OPERAND (ref, 1));
91 return (real_lvalue_p (TREE_OPERAND (ref, 1))
92 && real_lvalue_p (TREE_OPERAND (ref, 2)));
98 return real_lvalue_p (TREE_OPERAND (ref, 1));
102 return (real_lvalue_p (TREE_OPERAND (ref, 0))
103 && real_lvalue_p (TREE_OPERAND (ref, 1)));
113 if (! language_lvalue_valid (ref))
116 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
119 if (ref == current_class_decl && flag_this_is_variable <= 0)
122 switch (TREE_CODE (ref))
124 /* preincrements and predecrements are valid lvals, provided
125 what they refer to are valid lvals. */
126 case PREINCREMENT_EXPR:
127 case PREDECREMENT_EXPR:
130 return lvalue_p (TREE_OPERAND (ref, 0));
136 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
137 && DECL_LANG_SPECIFIC (ref)
138 && DECL_IN_AGGR_P (ref))
145 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
146 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
150 case WITH_CLEANUP_EXPR:
151 return lvalue_p (TREE_OPERAND (ref, 0));
157 if (IS_AGGR_TYPE (TREE_TYPE (ref)))
161 /* A currently unresolved scope ref. */
163 my_friendly_abort (103);
165 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
167 return lvalue_p (TREE_OPERAND (ref, 0))
168 && lvalue_p (TREE_OPERAND (ref, 1));
172 return (lvalue_p (TREE_OPERAND (ref, 1))
173 && lvalue_p (TREE_OPERAND (ref, 2)));
179 return lvalue_p (TREE_OPERAND (ref, 1));
183 return (lvalue_p (TREE_OPERAND (ref, 0))
184 && lvalue_p (TREE_OPERAND (ref, 1)));
190 /* Return nonzero if REF is an lvalue valid for this language;
191 otherwise, print an error message and return zero. */
194 lvalue_or_else (ref, string)
198 int win = lvalue_p (ref);
200 error ("non-lvalue in %s", string);
204 /* INIT is a CALL_EXPR which needs info about its target.
205 TYPE is the type that this initialization should appear to have.
207 Build an encapsulation of the initialization to perform
208 and return it so that it can be processed by language-independent
209 and language-specific expression expanders.
211 If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
212 Otherwise, cleanups are not built here. For example, when building
213 an initialization for a stack slot, since the called function handles
214 the cleanup, we would not want to do it here. */
216 build_cplus_new (type, init, with_cleanup_p)
224 slot = build (VAR_DECL, type);
225 layout_decl (slot, 0);
226 rval = build (NEW_EXPR, type,
227 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
228 TREE_SIDE_EFFECTS (rval) = 1;
229 TREE_ADDRESSABLE (rval) = 1;
230 rval = build (TARGET_EXPR, type, slot, rval, 0);
231 TREE_SIDE_EFFECTS (rval) = 1;
232 TREE_ADDRESSABLE (rval) = 1;
235 if (with_cleanup_p && TYPE_NEEDS_DESTRUCTOR (type))
237 TREE_OPERAND (rval, 2) = error_mark_node;
238 rval = build (WITH_CLEANUP_EXPR, type, rval, 0,
239 build_delete (build_pointer_type (type),
240 build_unary_op (ADDR_EXPR, slot, 0),
242 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0));
243 TREE_SIDE_EFFECTS (rval) = 1;
244 TREE_ADDRESSABLE (rval) = 1;
250 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
251 these CALL_EXPRs with tree nodes that will perform the cleanups. */
254 break_out_cleanups (exp)
259 if (TREE_CODE (tmp) == CALL_EXPR
260 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
261 return build_cplus_new (TREE_TYPE (tmp), tmp, 1);
263 while (TREE_CODE (tmp) == NOP_EXPR
264 || TREE_CODE (tmp) == CONVERT_EXPR
265 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
267 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
268 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
270 TREE_OPERAND (tmp, 0)
271 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
272 TREE_OPERAND (tmp, 0), 1);
276 tmp = TREE_OPERAND (tmp, 0);
281 /* Recursively perform a preorder search EXP for CALL_EXPRs, making
282 copies where they are found. Returns a deep copy of all nodes transitively
283 containing CALL_EXPRs. */
286 break_out_calls (exp)
289 register tree t1, t2;
290 register enum tree_code code;
291 register int changed = 0;
294 if (exp == NULL_TREE)
297 code = TREE_CODE (exp);
299 if (code == CALL_EXPR)
300 return copy_node (exp);
302 /* Don't try and defeat a save_expr, as it should only be done once. */
303 if (code == SAVE_EXPR)
306 switch (TREE_CODE_CLASS (code))
311 case 'c': /* a constant */
312 case 't': /* a type node */
313 case 'x': /* something random, like an identifier or an ERROR_MARK. */
316 case 'd': /* A decl node */
317 #if 0 /* This is bogus. jason 9/21/94 */
319 t1 = break_out_calls (DECL_INITIAL (exp));
320 if (t1 != DECL_INITIAL (exp))
322 exp = copy_node (exp);
323 DECL_INITIAL (exp) = t1;
328 case 'b': /* A block node */
330 /* Don't know how to handle these correctly yet. Must do a
331 break_out_calls on all DECL_INITIAL values for local variables,
332 and also break_out_calls on all sub-blocks and sub-statements. */
337 case 'e': /* an expression */
338 case 'r': /* a reference */
339 case 's': /* an expression with side effects */
340 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
342 t1 = break_out_calls (TREE_OPERAND (exp, i));
343 if (t1 != TREE_OPERAND (exp, i))
345 exp = copy_node (exp);
346 TREE_OPERAND (exp, i) = t1;
351 case '<': /* a comparison expression */
352 case '2': /* a binary arithmetic expression */
353 t2 = break_out_calls (TREE_OPERAND (exp, 1));
354 if (t2 != TREE_OPERAND (exp, 1))
356 case '1': /* a unary arithmetic expression */
357 t1 = break_out_calls (TREE_OPERAND (exp, 0));
358 if (t1 != TREE_OPERAND (exp, 0))
362 if (tree_code_length[(int) code] == 1)
363 return build1 (code, TREE_TYPE (exp), t1);
365 return build (code, TREE_TYPE (exp), t1, t2);
/* Obstack declarations (GCC's region allocators), defined elsewhere --
   presumably in tree.c; confirm.  The functions below temporarily switch
   current_obstack/saveable_obstack to control where new nodes land.  */
372 extern struct obstack *current_obstack;
373 extern struct obstack permanent_obstack, class_obstack;
374 extern struct obstack *saveable_obstack;
376 /* Here is how primitive or already-canonicalized types' hash
377 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
/* NOTE(review): this hashes the node's address itself -- the pointer is
   cast to an integer and masked to its low 18 bits (0777777 octal).
   Hash codes are therefore address-based, not structural.  */
378 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
380 /* Construct, lay out and return the type of methods belonging to class
381 BASETYPE and whose arguments are described by ARGTYPES and whose values
382 are described by RETTYPE. If each type exists already, reuse it. */
384 build_cplus_method_type (basetype, rettype, argtypes)
385 tree basetype, rettype, argtypes;
391 /* Make a node of the sort we want. */
392 t = make_node (METHOD_TYPE);
394 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
395 TREE_TYPE (t) = rettype;
396 if (IS_SIGNATURE (basetype))
397 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
398 TYPE_READONLY (basetype),
399 TYPE_VOLATILE (basetype));
401 ptype = build_pointer_type (basetype);
403 /* The actual arglist for this function includes a "hidden" argument
404 which is "this". Put it into the list of argument types. */
406 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
407 TYPE_ARG_TYPES (t) = argtypes;
408 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
410 /* If we already have such a type, use the old one and free this one.
411 Note that it also frees up the above cons cell if found. */
412 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
413 t = type_hash_canon (hashcode, t);
415 if (TYPE_SIZE (t) == 0)
422 build_cplus_staticfn_type (basetype, rettype, argtypes)
423 tree basetype, rettype, argtypes;
428 /* Make a node of the sort we want. */
429 t = make_node (FUNCTION_TYPE);
431 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
432 TREE_TYPE (t) = rettype;
434 TYPE_ARG_TYPES (t) = argtypes;
436 /* If we already have such a type, use the old one and free this one.
437 Note that it also frees up the above cons cell if found. */
438 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
439 t = type_hash_canon (hashcode, t);
441 if (TYPE_SIZE (t) == 0)
448 build_cplus_array_type (elt_type, index_type)
452 register struct obstack *ambient_obstack = current_obstack;
453 register struct obstack *ambient_saveable_obstack = saveable_obstack;
456 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
457 make this permanent too. */
458 if (TREE_PERMANENT (elt_type)
459 && (index_type == 0 || TREE_PERMANENT (index_type)))
461 current_obstack = &permanent_obstack;
462 saveable_obstack = &permanent_obstack;
465 t = build_array_type (elt_type, index_type);
467 /* Push these needs up so that initialization takes place
469 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
470 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
471 current_obstack = ambient_obstack;
472 saveable_obstack = ambient_saveable_obstack;
476 /* Make a variant type in the proper way for C/C++, propagating qualifiers
477 down to the element type of an array. */
480 cp_build_type_variant (type, constp, volatilep)
482 int constp, volatilep;
484 if (TREE_CODE (type) == ARRAY_TYPE)
486 tree real_main_variant = TYPE_MAIN_VARIANT (type);
488 push_obstacks (TYPE_OBSTACK (real_main_variant),
489 TYPE_OBSTACK (real_main_variant));
490 type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
494 /* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
495 make a copy. (TYPE might have come from the hash table and
496 REAL_MAIN_VARIANT might be in some function's obstack.) */
498 if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
500 type = copy_node (type);
501 TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
504 TYPE_MAIN_VARIANT (type) = real_main_variant;
507 return build_type_variant (type, constp, volatilep);
510 /* Add OFFSET to all base types of T.
512 OFFSET, which is a type offset, is number of bytes.
514 Note that we don't have to worry about having two paths to the
515 same base type, since this type owns its association list. */
517 propagate_binfo_offsets (binfo, offset)
521 tree binfos = BINFO_BASETYPES (binfo);
522 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
524 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
526 tree base_binfo = TREE_VEC_ELT (binfos, i);
528 if (TREE_VIA_VIRTUAL (base_binfo))
533 tree base_binfos = BINFO_BASETYPES (base_binfo);
536 for (j = i+1; j < n_baselinks; j++)
537 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
539 /* The next basetype offset must take into account the space
540 between the classes, not just the size of each class. */
541 delta = size_binop (MINUS_EXPR,
542 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
543 BINFO_OFFSET (base_binfo));
548 if (BINFO_OFFSET_ZEROP (base_binfo))
549 BINFO_OFFSET (base_binfo) = offset;
551 BINFO_OFFSET (base_binfo)
552 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
554 BINFO_OFFSET (base_binfo) = offset;
559 tree chain = NULL_TREE;
561 /* Now unshare the structure beneath BASE_BINFO. */
562 for (k = TREE_VEC_LENGTH (base_binfos)-1;
565 tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
566 if (! TREE_VIA_VIRTUAL (base_base_binfo))
567 TREE_VEC_ELT (base_binfos, k)
568 = make_binfo (BINFO_OFFSET (base_base_binfo),
570 BINFO_VTABLE (base_base_binfo),
571 BINFO_VIRTUALS (base_base_binfo),
573 chain = TREE_VEC_ELT (base_binfos, k);
574 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
575 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
576 BINFO_INHERITANCE_CHAIN (chain) = base_binfo;
578 /* Now propagate the offset to the base types. */
579 propagate_binfo_offsets (base_binfo, offset);
582 /* Go to our next class that counts for offset propagation. */
585 offset = size_binop (PLUS_EXPR, offset, delta);
590 /* Compute the actual offsets that our virtual base classes
591 will have *for this type*. This must be performed after
592 the fields are laid out, since virtual baseclasses must
593 lay down at the end of the record.
595 Returns the maximum number of virtual functions any of the virtual
596 baseclasses provide. */
598 layout_vbasetypes (rec, max)
602 /* Get all the virtual base types that this type uses.
603 The TREE_VALUE slot holds the virtual baseclass type. */
604 tree vbase_types = get_vbase_types (rec);
606 #ifdef STRUCTURE_SIZE_BOUNDARY
607 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
609 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
613 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
614 where CONST_SIZE is an integer
615 and VAR_SIZE is a tree expression.
616 If VAR_SIZE is null, the size is just CONST_SIZE.
617 Naturally we try to avoid using VAR_SIZE. */
618 register unsigned const_size = 0;
619 register tree var_size = 0;
620 int nonvirtual_const_size;
621 tree nonvirtual_var_size;
623 CLASSTYPE_VBASECLASSES (rec) = vbase_types;
625 if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
626 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
628 var_size = TYPE_SIZE (rec);
630 nonvirtual_const_size = const_size;
631 nonvirtual_var_size = var_size;
635 tree basetype = BINFO_TYPE (vbase_types);
638 desired_align = TYPE_ALIGN (basetype);
639 record_align = MAX (record_align, desired_align);
642 offset = integer_zero_node;
645 /* Give each virtual base type the alignment it wants. */
646 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
647 * TYPE_ALIGN (basetype);
648 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
651 if (CLASSTYPE_VSIZE (basetype) > max)
652 max = CLASSTYPE_VSIZE (basetype);
653 BINFO_OFFSET (vbase_types) = offset;
655 if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
657 /* Every virtual baseclass takes at least a UNIT, so that we can
658 take its address and get something different for each base. */
659 const_size += MAX (BITS_PER_UNIT,
660 TREE_INT_CST_LOW (TYPE_SIZE (basetype))
661 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
663 else if (var_size == 0)
664 var_size = TYPE_SIZE (basetype);
666 var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));
668 vbase_types = TREE_CHAIN (vbase_types);
673 /* Because a virtual base might take a single byte above,
674 we have to re-adjust the total size to make sure it is
675 a multiple of the alignment. */
676 /* Give the whole object the alignment it wants. */
677 const_size = CEIL (const_size, record_align) * record_align;
680 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
681 here, as that is for this class, without any virtual base classes. */
682 TYPE_ALIGN (rec) = record_align;
683 if (const_size != nonvirtual_const_size)
685 CLASSTYPE_VBASE_SIZE (rec)
686 = size_int (const_size - nonvirtual_const_size);
687 TYPE_SIZE (rec) = size_int (const_size);
690 /* Now propagate offset information throughout the lattice
691 under the vbase type. */
692 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
693 vbase_types = TREE_CHAIN (vbase_types))
695 tree base_binfos = BINFO_BASETYPES (vbase_types);
697 BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
701 tree chain = NULL_TREE;
703 /* Now unshare the structure beneath BASE_BINFO. */
705 for (j = TREE_VEC_LENGTH (base_binfos)-1;
708 tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
709 if (! TREE_VIA_VIRTUAL (base_base_binfo))
710 TREE_VEC_ELT (base_binfos, j)
711 = make_binfo (BINFO_OFFSET (base_base_binfo),
713 BINFO_VTABLE (base_base_binfo),
714 BINFO_VIRTUALS (base_base_binfo),
716 chain = TREE_VEC_ELT (base_binfos, j);
717 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
718 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
719 BINFO_INHERITANCE_CHAIN (chain) = vbase_types;
722 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
729 /* Lay out the base types of a record type, REC.
730 Tentatively set the size and alignment of REC
731 according to the base types alone.
733 Offsets for immediate nonvirtual baseclasses are also computed here.
735 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
736 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
738 Returns list of virtual base classes in a FIELD_DECL chain. */
740 layout_basetypes (rec, binfos)
743 /* Chain to hold all the new FIELD_DECLs which point at virtual
745 tree vbase_decls = NULL_TREE;
747 #ifdef STRUCTURE_SIZE_BOUNDARY
748 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
750 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
753 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
754 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
755 the size is just CONST_SIZE. Naturally we try to avoid using
756 VAR_SIZE. And so far, we've been successful. */
758 register tree var_size = 0;
761 register unsigned const_size = 0;
762 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
764 /* Handle basetypes almost like fields, but record their
765 offsets differently. */
767 for (i = 0; i < n_baseclasses; i++)
769 int inc, desired_align, int_vbase_size;
770 register tree base_binfo = TREE_VEC_ELT (binfos, i);
771 register tree basetype = BINFO_TYPE (base_binfo);
774 if (TYPE_SIZE (basetype) == 0)
777 /* This error is now reported in xref_tag, thus giving better
778 location information. */
779 error_with_aggr_type (base_binfo,
780 "base class `%s' has incomplete type");
782 TREE_VIA_PUBLIC (base_binfo) = 1;
783 TREE_VIA_PROTECTED (base_binfo) = 0;
784 TREE_VIA_VIRTUAL (base_binfo) = 0;
786 /* Should handle this better so that
789 class B: private A { virtual void F(); };
791 does not dump core when compiled. */
792 my_friendly_abort (121);
797 /* All basetypes are recorded in the association list of the
800 if (TREE_VIA_VIRTUAL (base_binfo))
803 char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
804 + sizeof (VBASE_NAME) + 1);
806 /* The offset for a virtual base class is only used in computing
807 virtual function tables and for initializing virtual base
808 pointers. It is built once `get_vbase_types' is called. */
810 /* If this basetype can come from another vbase pointer
811 without an additional indirection, we will share
812 that pointer. If an indirection is involved, we
813 make our own pointer. */
814 for (j = 0; j < n_baseclasses; j++)
816 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
817 if (! TREE_VIA_VIRTUAL (other_base_binfo)
818 && binfo_member (basetype,
819 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
822 sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
823 decl = build_lang_decl (FIELD_DECL, get_identifier (name),
824 build_pointer_type (basetype));
825 /* If you change any of the below, take a look at all the
826 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
828 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
829 DECL_VIRTUAL_P (decl) = 1;
830 DECL_FIELD_CONTEXT (decl) = rec;
831 DECL_CLASS_CONTEXT (decl) = rec;
832 DECL_FCONTEXT (decl) = basetype;
833 DECL_SAVED_INSNS (decl) = NULL_RTX;
834 DECL_FIELD_SIZE (decl) = 0;
835 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
836 TREE_CHAIN (decl) = vbase_decls;
837 BINFO_VPTR_FIELD (base_binfo) = decl;
840 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
841 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
843 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
844 "destructor `%s' non-virtual");
845 warning ("in inheritance relationship `%s: virtual %s'",
846 TYPE_NAME_STRING (rec),
847 TYPE_NAME_STRING (basetype));
850 /* The space this decl occupies has already been accounted for. */
855 offset = integer_zero_node;
858 /* Give each base type the alignment it wants. */
859 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
860 * TYPE_ALIGN (basetype);
861 offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
864 /* bpk: Disabled this check until someone is willing to
865 claim it as theirs and explain exactly what circumstances
866 warrant the warning. */
867 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
868 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
870 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
871 "destructor `%s' non-virtual");
872 warning ("in inheritance relationship `%s:%s %s'",
873 TYPE_NAME_STRING (rec),
874 TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
875 TYPE_NAME_STRING (basetype));
879 BINFO_OFFSET (base_binfo) = offset;
880 if (CLASSTYPE_VSIZE (basetype))
882 BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
883 BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
885 TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
886 TYPE_BINFO (rec) = base_binfo;
888 /* Add only the amount of storage not present in
889 the virtual baseclasses. */
891 int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
892 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
894 inc = MAX (record_align,
895 (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
898 /* Record must have at least as much alignment as any field. */
899 desired_align = TYPE_ALIGN (basetype);
900 record_align = MAX (record_align, desired_align);
907 CLASSTYPE_SIZE (rec) = size_int (const_size);
909 CLASSTYPE_SIZE (rec) = integer_zero_node;
910 CLASSTYPE_ALIGN (rec) = record_align;
915 /* Hashing of lists so that we don't make duplicates.
916 The entry point is `list_hash_canon'. */
918 /* Each hash table slot is a bucket containing a chain
919 of these structures. */
923 struct list_hash *next; /* Next structure in the bucket. */
924 int hashcode; /* Hash code of this list. */
925 tree list; /* The list recorded here. */
928 /* Now here is the hash table. When recording a list, it is added
929 to the slot whose index is the hash code mod the table size.
930 Note that the hash table is used for several kinds of lists.
931 While all these live in the same table, they are completely independent,
932 and the hash code is computed differently for each of these. */
/* Number of buckets (a prime) and the bucket-head array for the
   list hash table; indices are computed as hashcode % TYPE_HASH_SIZE
   in the lookup/add routines below.  */
934 #define TYPE_HASH_SIZE 59
935 struct list_hash *list_hash_table[TYPE_HASH_SIZE];
937 /* Compute a hash code for a list (chain of TREE_LIST nodes
938 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
939 TREE_COMMON slots), by adding the hash codes of the individual entries. */
945 register int hashcode = 0;
947 if (TREE_CHAIN (list))
948 hashcode += TYPE_HASH (TREE_CHAIN (list));
950 if (TREE_VALUE (list))
951 hashcode += TYPE_HASH (TREE_VALUE (list));
954 if (TREE_PURPOSE (list))
955 hashcode += TYPE_HASH (TREE_PURPOSE (list));
961 /* Look in the type hash table for a type isomorphic to TYPE.
962 If one is found, return it. Otherwise return 0. */
965 list_hash_lookup (hashcode, list)
969 register struct list_hash *h;
970 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
971 if (h->hashcode == hashcode
972 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
973 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
974 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
975 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
976 && TREE_VALUE (h->list) == TREE_VALUE (list)
977 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
979 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
985 /* Add an entry to the list-hash-table
986 for a list TYPE whose hash code is HASHCODE. */
989 list_hash_add (hashcode, list)
993 register struct list_hash *h;
995 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
996 h->hashcode = hashcode;
998 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
999 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
1002 /* Given TYPE, and HASHCODE its hash code, return the canonical
1003 object for an identical list if one already exists.
1004 Otherwise, return TYPE, and record it as the canonical object
1005 if it is a permanent object.
1007 To use this function, first create a list of the sort you want.
1008 Then compute its hash code from the fields of the list that
1009 make it different from other similar lists.
1010 Then call this function and use the value.
1011 This function frees the list you pass in if it is a duplicate. */
1013 /* Set to 1 to debug without canonicalization. Never set by program. */
/* (Meant to be flipped by hand in a debugger: when nonzero,
   list_hash_canon returns its argument without consulting the table.)  */
1014 static int debug_no_list_hash = 0;
1017 list_hash_canon (hashcode, list)
1023 if (debug_no_list_hash)
1026 t1 = list_hash_lookup (hashcode, list);
1029 obstack_free (&class_obstack, list);
1033 /* If this is a new list, record it for later reuse. */
1034 list_hash_add (hashcode, list);
1040 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
1041 int via_public, via_virtual, via_protected;
1042 tree purpose, value, chain;
1044 struct obstack *ambient_obstack = current_obstack;
1048 current_obstack = &class_obstack;
1049 t = tree_cons (purpose, value, chain);
1050 TREE_VIA_PUBLIC (t) = via_public;
1051 TREE_VIA_PROTECTED (t) = via_protected;
1052 TREE_VIA_VIRTUAL (t) = via_virtual;
1053 hashcode = list_hash (t);
1054 t = list_hash_canon (hashcode, t);
1055 current_obstack = ambient_obstack;
1059 /* Constructor for hashed lists. */
1061 hash_tree_chain (value, chain)
1064 struct obstack *ambient_obstack = current_obstack;
1068 current_obstack = &class_obstack;
1069 t = tree_cons (NULL_TREE, value, chain);
1070 hashcode = list_hash (t);
1071 t = list_hash_canon (hashcode, t);
1072 current_obstack = ambient_obstack;
1076 /* Similar, but used for concatenating two lists. */
1078 hash_chainon (list1, list2)
1085 if (TREE_CHAIN (list1) == NULL_TREE)
1086 return hash_tree_chain (TREE_VALUE (list1), list2);
1087 return hash_tree_chain (TREE_VALUE (list1),
1088 hash_chainon (TREE_CHAIN (list1), list2));
1092 get_identifier_list (value)
1095 tree list = IDENTIFIER_AS_LIST (value);
1096 if (list != NULL_TREE
1097 && (TREE_CODE (list) != TREE_LIST
1098 || TREE_VALUE (list) != value))
1100 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
1101 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
1102 && IDENTIFIER_TYPE_VALUE (value)
1103 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
1105 tree type = IDENTIFIER_TYPE_VALUE (value);
1107 if (TYPE_PTRMEMFUNC_P (type))
1109 else if (type == current_class_type)
1110 /* Don't mess up the constructor name. */
1111 list = tree_cons (NULL_TREE, value, NULL_TREE);
1115 /* This will return the correct thing for regular types,
1116 nested types, and templates. Yay! */
1117 if (TYPE_NESTED_NAME (type))
1118 id = TYPE_NESTED_NAME (type);
1120 id = TYPE_IDENTIFIER (type);
1122 if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
1123 CLASSTYPE_ID_AS_LIST (type)
1124 = perm_tree_cons (NULL_TREE, id, NULL_TREE);
1125 list = CLASSTYPE_ID_AS_LIST (type);
1132 get_decl_list (value)
1135 tree list = NULL_TREE;
1137 if (TREE_CODE (value) == IDENTIFIER_NODE)
1138 list = get_identifier_list (value);
1139 else if (TREE_CODE (value) == RECORD_TYPE
1140 && TYPE_LANG_SPECIFIC (value))
1141 list = CLASSTYPE_AS_LIST (value);
1143 if (list != NULL_TREE)
1145 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1149 return build_decl_list (NULL_TREE, value);
1152 /* Look in the type hash table for a type isomorphic to
1153 `build_tree_list (NULL_TREE, VALUE)'.
1154 If one is found, return it. Otherwise return 0. */
1157 list_hash_lookup_or_cons (value)
1160 register int hashcode = TYPE_HASH (value);
1161 register struct list_hash *h;
1162 struct obstack *ambient_obstack;
1163 tree list = NULL_TREE;
1165 if (TREE_CODE (value) == IDENTIFIER_NODE)
1166 list = get_identifier_list (value);
1167 else if (TREE_CODE (value) == TYPE_DECL
1168 && TREE_CODE (TREE_TYPE (value)) == RECORD_TYPE
1169 && TYPE_LANG_SPECIFIC (TREE_TYPE (value)))
1170 list = CLASSTYPE_ID_AS_LIST (TREE_TYPE (value));
1171 else if (TREE_CODE (value) == RECORD_TYPE
1172 && TYPE_LANG_SPECIFIC (value))
1173 list = CLASSTYPE_AS_LIST (value);
1175 if (list != NULL_TREE)
1177 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 302);
1181 if (debug_no_list_hash)
1182 return hash_tree_chain (value, NULL_TREE);
1184 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
1185 if (h->hashcode == hashcode
1186 && TREE_VIA_VIRTUAL (h->list) == 0
1187 && TREE_VIA_PUBLIC (h->list) == 0
1188 && TREE_VIA_PROTECTED (h->list) == 0
1189 && TREE_PURPOSE (h->list) == 0
1190 && TREE_VALUE (h->list) == value)
1192 my_friendly_assert (TREE_TYPE (h->list) == 0, 303);
1193 my_friendly_assert (TREE_CHAIN (h->list) == 0, 304);
1197 ambient_obstack = current_obstack;
1198 current_obstack = &class_obstack;
1199 list = build_tree_list (NULL_TREE, value);
1200 list_hash_add (hashcode, list);
1201 current_obstack = ambient_obstack;
1205 /* Build an association between TYPE and some parameters:
1207 OFFSET is the offset added to `this' to convert it to a pointer
1210 BINFO is the base binfo to use, if we are deriving from one. This
1211 is necessary, as we want specialized parent binfos from base
1212 classes, so that the VTABLE_NAMEs of bases are for the most derived
1213 type, instead of the simple type.
1215 VTABLE is the virtual function table with which to initialize
1216 sub-objects of type TYPE.
1218 VIRTUALS are the virtual functions sitting in VTABLE.
1220 CHAIN are more associations we must retain. */
1223 make_binfo (offset, binfo, vtable, virtuals, chain)
1225 tree vtable, virtuals;
1228 tree new_binfo = make_tree_vec (6);
1231 if (TREE_CODE (binfo) == TREE_VEC)
1232 type = BINFO_TYPE (binfo);
1236 binfo = TYPE_BINFO (binfo);
1239 TREE_CHAIN (new_binfo) = chain;
1241 TREE_USED (new_binfo) = TREE_USED (chain);
1243 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1244 BINFO_OFFSET (new_binfo) = offset;
1245 BINFO_VTABLE (new_binfo) = vtable;
1246 BINFO_VIRTUALS (new_binfo) = virtuals;
1247 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1249 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1250 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1254 /* Return the binfo value for ELEM in TYPE. */
/* Return the binfo value for ELEM in TYPE (see comment above); errors
   if ELEM is an ambiguous base class of TYPE.  NOTE(review): lines are
   missing from this listing, so the conditional guarding the early
   return cannot be fully confirmed here.  */
1257 binfo_value (elem, type)
1261 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1262 compiler_error ("base class `%s' ambiguous in binfo_value",
1263 TYPE_NAME_STRING (elem));
1265 return TYPE_BINFO (type);
1266 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1268 return get_binfo (elem, type, 0);
/* Fragment (enclosing function header not visible in this listing):
   classic in-place pointer-reversal loop over the
   BINFO_INHERITANCE_CHAIN links of `path'.  */
1275 register tree prev = 0, tmp, next;
1276 for (tmp = path; tmp; tmp = next)
1278 next = BINFO_INHERITANCE_CHAIN (tmp);
1279 BINFO_INHERITANCE_CHAIN (tmp) = prev;
/* Search for ELEM among the binfos of LIST: first a direct scan of the
   list, then a lookup through each binfo's base types via binfo_value.
   NOTE(review): declarations and return statements are missing from
   this listing.  */
1286 virtual_member (elem, list)
1293 for (t = list; t; t = TREE_CHAIN (t))
1294 if (elem == BINFO_TYPE (t))
1297 for (t = list; t; t = TREE_CHAIN (t))
1299 tree binfos = BINFO_BASETYPES (t);
1302 if (binfos != NULL_TREE)
1303 for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
1305 nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
/* Finding ELEM at two different offsets is an unresolvable ambiguity;
   abort.  */
1308 if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
1309 my_friendly_abort (104);
/* Fragment of a binfo debugging dump (function header not visible):
   prints `elem's type name, offset, vtable decl (if any) and each
   virtual function -- with its counted slot versus its DECL_VINDEX --
   to stderr.  */
1321 unsigned HOST_WIDE_INT n;
1324 fprintf (stderr, "type \"%s\"; offset = %d\n",
1325 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1326 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1327 fprintf (stderr, "vtable type:\n");
1328 debug_tree (BINFO_TYPE (elem));
1329 if (BINFO_VTABLE (elem))
1330 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1332 fprintf (stderr, "no vtable decl yet\n");
1333 fprintf (stderr, "virtuals:\n");
1334 virtuals = BINFO_VIRTUALS (elem);
/* Skip the run-time type info entries at the head of the vtable.  */
1336 n = skip_rtti_stuff (&virtuals);
1340 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1341 fprintf (stderr, "%s [%d =? %d]\n",
1342 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1343 n, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1345 virtuals = TREE_CHAIN (virtuals);
1349 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1350 We expect a null pointer to mark the end of the chain.
1351 This is the Lisp primitive `length'. */
/* Return the length of the DECL_CHAIN starting at T (see comment
   above).  Only FUNCTION_DECL or TEMPLATE_DECL heads are accepted.
   NOTE(review): the loop body and return are missing from this
   listing.  */
1354 decl_list_length (t)
1358 register int len = 0;
1360 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1361 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1362 for (tail = t; tail; tail = DECL_CHAIN (tail))
/* Fragment (function header not visible): dispatches on T -- a lone
   FUNCTION_DECL, or a TREE_LIST whose value heads a DECL_CHAIN counted
   by decl_list_length -- otherwise aborts.  Presumably counts the
   functions in an overload set; confirm against the full source.  */
1372 if (TREE_CODE (t) == FUNCTION_DECL)
1374 else if (TREE_CODE (t) == TREE_LIST)
1375 return decl_list_length (TREE_VALUE (t));
1377 my_friendly_abort (359);
1381 /* Like value_member, but for DECL_CHAINs. */
/* Like value_member, but walks DECL_CHAIN links instead of TREE_CHAIN
   (see comment above).  NOTE(review): most of the body is missing from
   this listing.  */
1383 decl_value_member (elem, list)
1390 list = DECL_CHAIN (list);
/* Predicate: X denotes a (possibly overloaded) function -- either a
   FUNCTION_DECL itself, or a TREE_LIST whose value is a FUNCTION_DECL
   or TEMPLATE_DECL.  NOTE(review): the return statements are missing
   from this listing.  */
1396 is_overloaded_fn (x)
1399 if (TREE_CODE (x) == FUNCTION_DECL)
1402 if (TREE_CODE (x) == TREE_LIST
1403 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1404 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
/* Predicate: X is a TREE_LIST whose value is a FUNCTION_DECL or
   TEMPLATE_DECL -- i.e. an actual overload list, not a lone decl.
   NOTE(review): the rest of the condition and the returns are missing
   from this listing.  */
1411 really_overloaded_fn (x)
1414 if (TREE_CODE (x) == TREE_LIST
1415 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1416 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
/* Fragment (function header not visible): returns FROM itself when it
   is already a FUNCTION_DECL; otherwise asserts FROM is a TREE_LIST
   and returns its first value -- the first function of an overload
   list.  */
1426 if (TREE_CODE (from) == FUNCTION_DECL)
1429 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1431 return TREE_VALUE (from);
/* Return the function address stored in vtable entry ENTRY.  Under
   -fvtable-thunks the entry may be an ADDR_EXPR of a THUNK_DECL whose
   DECL_INITIAL holds the real function; otherwise the address is the
   third element of the entry's CONSTRUCTOR_ELTS.  NOTE(review): lines
   are missing between the two layouts in this listing.  */
1435 fnaddr_from_vtable_entry (entry)
1438 if (flag_vtable_thunks)
1441 if (TREE_CODE (func) == ADDR_EXPR)
1442 func = TREE_OPERAND (func, 0);
1443 if (TREE_CODE (func) == THUNK_DECL)
1444 return DECL_INITIAL (func);
1449 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
/* Store VALUE as the function address of vtable entry ENTRY (non-thunk
   layout: third element of CONSTRUCTOR_ELTS).  NOTE(review): the
   flag_vtable_thunks branch body is missing from this listing.  */
1453 set_fnaddr_from_vtable_entry (entry, value)
1456 if (flag_vtable_thunks)
1459 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))) = value;
/* Return TYPE_ARG_TYPES of T's type with its first element skipped
   (TREE_CHAIN of the argument-type list).  */
1463 function_arg_chain (t)
1466 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
/* Nonzero if T -- after stripping one level when its tree code is CODE
   (the stripping line is missing from this listing; confirm against
   the full source) -- is an aggregate type.  */
1470 promotes_to_aggr_type (t, code)
1472 enum tree_code code;
1474 if (TREE_CODE (t) == code)
1476 return IS_AGGR_TYPE (t);
/* Nonzero iff T1 and T2 have the same tree code and both are aggregate
   types.  */
1480 is_aggr_type_2 (t1, t2)
1483 if (TREE_CODE (t1) != TREE_CODE (t2))
1485 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1488 /* Give message using types TYPE1 and TYPE2 as arguments.
1489 PFN is the function which will print the message;
1490 S is the format string for PFN to use. */
/* Emit a diagnostic via PFN using format string S and the printable
   names of TYPE1 and TYPE2 (see comment above).  */
1492 message_2_types (pfn, s, type1, type2)
1497 tree name1 = TYPE_NAME (type1);
1498 tree name2 = TYPE_NAME (type2);
/* TYPE_NAME may be a TYPE_DECL rather than an IDENTIFIER; dig out the
   identifier in that case.  */
1499 if (TREE_CODE (name1) == TYPE_DECL)
1500 name1 = DECL_NAME (name1);
1501 if (TREE_CODE (name2) == TYPE_DECL)
1502 name2 = DECL_NAME (name2);
1503 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
1506 #define PRINT_RING_SIZE 4
/* Return a printable name string for DECL.  Results for FUNCTION_DECLs
   with lang-specific data are cached in a small ring of
   PRINT_RING_SIZE malloc'd strings; everything else goes straight to
   decl_as_string.  NOTE(review): lines (braces, the ring-counter
   wrap-around reset) are missing from this listing.  */
1509 lang_printable_name (decl)
1512 static tree decl_ring[PRINT_RING_SIZE];
1513 static char *print_ring[PRINT_RING_SIZE];
1514 static int ring_counter;
1517 /* Only cache functions. */
1518 if (TREE_CODE (decl) != FUNCTION_DECL
1519 || DECL_LANG_SPECIFIC (decl) == 0)
1520 return decl_as_string (decl, 1);
1522 /* See if this print name is lying around. */
1523 for (i = 0; i < PRINT_RING_SIZE; i++)
1524 if (decl_ring[i] == decl)
1525 /* yes, so return it. */
1526 return print_ring[i];
1528 if (++ring_counter == PRINT_RING_SIZE)
/* Never evict the slot caching current_function_decl's name -- the
   error machinery may still be pointing at it; abort (106) if the
   ring wraps and still lands on it.  */
1531 if (current_function_decl != NULL_TREE)
1533 if (decl_ring[ring_counter] == current_function_decl)
1535 if (ring_counter == PRINT_RING_SIZE)
1537 if (decl_ring[ring_counter] == current_function_decl)
1538 my_friendly_abort (106);
/* Free the evicted slot's string before overwriting it.  */
1541 if (print_ring[ring_counter])
1542 free (print_ring[ring_counter]);
/* Constructors and destructors are printed without a return type.  */
1545 int print_ret_type_p
1546 = (!DECL_CONSTRUCTOR_P (decl)
1547 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1549 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1550 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1551 strcpy (print_ring[ring_counter], name);
1552 decl_ring[ring_counter] = decl;
1554 return print_ring[ring_counter];
1557 /* Comparison function for sorting identifiers in RAISES lists.
1558 Note that because IDENTIFIER_NODEs are unique, we can sort
1559 them by address, saving an indirection. */
/* Fragment of the RAISES-list comparison function (see comment above):
   IDENTIFIER_NODEs are unique, so comparing TREE_VALUE addresses
   directly gives a stable ordering.  */
1564 return (HOST_WIDE_INT)TREE_VALUE (*p1) - (HOST_WIDE_INT)TREE_VALUE (*p2);
1567 /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
1568 listed in RAISES. */
/* Build (or reuse) the variant of function/method TYPE that may throw
   the exceptions listed in RAISES (see comment above).  NOTE(review):
   lines -- including the returns inside the scan loop and the final
   return -- are missing from this listing.  */
1570 build_exception_variant (type, raises)
1575 tree v = TYPE_MAIN_VARIANT (type);
1577 tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
1578 int constp = TYPE_READONLY (type);
1579 int volatilep = TYPE_VOLATILE (type);
/* Scan existing variants for one with matching qualifiers and an
   equal raises list; reuse it if found.  */
1581 for (v = TYPE_NEXT_VARIANT (v); v; v = TYPE_NEXT_VARIANT (v))
1583 if (TYPE_READONLY (v) != constp
1584 || TYPE_VOLATILE (v) != volatilep)
1587 /* @@ This should do set equality, not exact match. */
1588 if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v), raises))
1589 /* List of exceptions raised matches previously found list.
1591 @@ Nice to free up storage used in consing up the
1592 @@ list of exceptions raised. */
1596 /* Need to build a new variant. */
1597 v = copy_node (type);
1598 TYPE_NEXT_VARIANT (v) = TYPE_NEXT_VARIANT (type);
1599 TYPE_NEXT_VARIANT (type) = v;
/* Make sure the raises list outlives temporary allocation.  */
1601 if (raises && ! TREE_PERMANENT (raises))
1602 push_obstacks_nochange ();
1603 end_temporary_allocation ();
1604 raises = copy_list (raises);
1607 TYPE_RAISES_EXCEPTIONS (v) = raises;
1611 /* Subroutine of copy_to_permanent
1613 Assuming T is a node build bottom-up, make it all exist on
1614 permanent obstack, if it is not permanent already. */
/* Body of mapcar (header lines not visible in this listing): apply
   FUNC to tree T; when FUNC returns non-null that result wins,
   otherwise recurse structurally by tree code, rewriting operands in
   place for decls/lists/vecs/expressions and rebuilding derived types
   via the build_*_type constructors.  NOTE(review): many lines --
   case labels, braces, returns -- are missing from this listing.  */
1621 enum tree_code code;
/* Let FUNC short-circuit the whole walk.  */
1627 if (tmp = func (t), tmp != NULL_TREE)
1630 switch (code = TREE_CODE (t))
1633 return error_mark_node;
/* Decl nodes: map chain, type, initial value and size in place.  */
1642 tree chain = TREE_CHAIN (t);
1644 TREE_CHAIN (t) = mapcar (chain, func);
1645 TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
1646 DECL_INITIAL (t) = mapcar (DECL_INITIAL (t), func);
1647 DECL_SIZE (t) = mapcar (DECL_SIZE (t), func);
/* TREE_LIST: map purpose, value and chain.  */
1653 tree chain = TREE_CHAIN (t);
1655 TREE_PURPOSE (t) = mapcar (TREE_PURPOSE (t), func);
1656 TREE_VALUE (t) = mapcar (TREE_VALUE (t), func);
1657 TREE_CHAIN (t) = mapcar (chain, func);
/* TREE_VEC: map each element.  */
1663 int len = TREE_VEC_LENGTH (t);
1667 TREE_VEC_ELT (t, len) = mapcar (TREE_VEC_ELT (t, len), func);
1674 return copy_node (t);
/* Ternary expressions: map all three operands.  */
1680 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1681 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1682 TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
1687 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
/* Binary expressions: map both operands.  */
1694 case TRUNC_DIV_EXPR:
1695 case TRUNC_MOD_EXPR:
1703 case BIT_ANDTC_EXPR:
1704 case TRUTH_ANDIF_EXPR:
1705 case TRUTH_ORIF_EXPR:
1713 case FLOOR_DIV_EXPR:
1714 case ROUND_DIV_EXPR:
1716 case FLOOR_MOD_EXPR:
1717 case ROUND_MOD_EXPR:
1719 case PREDECREMENT_EXPR:
1720 case PREINCREMENT_EXPR:
1721 case POSTDECREMENT_EXPR:
1722 case POSTINCREMENT_EXPR:
1725 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1726 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
/* Unary expressions: map the single operand.  */
1734 case TRUTH_NOT_EXPR:
1738 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
/* Derived types are rebuilt, not mutated, so the type-sharing
   machinery can canonicalize the results.  */
1742 return build_pointer_type (mapcar (TREE_TYPE (t), func));
1743 case REFERENCE_TYPE:
1744 return build_reference_type (mapcar (TREE_TYPE (t), func));
1746 return build_function_type (mapcar (TREE_TYPE (t), func),
1747 mapcar (TYPE_ARG_TYPES (t), func));
1749 return build_array_type (mapcar (TREE_TYPE (t), func),
1750 mapcar (TYPE_DOMAIN (t), func));
1752 return build_index_type (mapcar (TYPE_MAX_VALUE (t), func));
1755 return build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t), func),
1756 mapcar (TREE_TYPE (t), func));
1758 return build_method_type
1759 (mapcar (TYPE_METHOD_BASETYPE (t), func),
1761 (mapcar (TREE_TYPE (t), func),
1762 mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t)), func)));
/* Pointer-to-member-function records get their own constructor.  */
1765 if (TYPE_PTRMEMFUNC_P (t))
1766 return build_ptrmemfunc_type
1767 (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t), func));
1768 /* else fall through */
1770 /* This list is incomplete, but should suffice for now.
1771 It is very important that `sorry' does not call
1772 `report_error_function'. That could cause an infinite loop. */
1774 sorry ("initializer contains unrecognized tree code");
1775 return error_mark_node;
1778 my_friendly_abort (107);
/* Fragment (function header not visible): nodes already on the
   permanent obstack need no further work.  Presumably part of
   perm_manip, the mapcar callback used by copy_to_permanent -- confirm
   against the full source.  */
1787 if (TREE_PERMANENT (t))
1792 /* Assuming T is a node built bottom-up, make it all exist on
1793 permanent obstack, if it is not permanent already. */
/* Copy tree T (bottom-up) onto the permanent obstack unless it is
   already permanent (see comment above).  Saves and restores the
   ambient allocation obstacks around the mapcar walk.  */
1795 copy_to_permanent (t)
1798 register struct obstack *ambient_obstack = current_obstack;
1799 register struct obstack *ambient_saveable_obstack = saveable_obstack;
/* Nothing to do for null or already-permanent trees.  */
1802 if (t == NULL_TREE || TREE_PERMANENT (t))
1805 saveable_obstack = &permanent_obstack;
1806 current_obstack = saveable_obstack;
/* Suspend momentary allocation so nothing lands on the wrong obstack
   during the walk.  */
1807 resume = suspend_momentary ();
1809 t = mapcar (t, perm_manip);
1811 resume_momentary (resume);
1812 current_obstack = ambient_obstack;
1813 saveable_obstack = ambient_saveable_obstack;
/* Dump C++ front-end memory and search statistics to aid compiler
   developers: obstack usage plus search/class statistics.  */
1819 print_lang_statistics ()
1821 extern struct obstack maybepermanent_obstack;
1822 print_obstack_statistics ("class_obstack", &class_obstack);
1823 print_obstack_statistics ("permanent_obstack", &permanent_obstack);
1824 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
1825 print_search_statistics ();
1826 print_class_statistics ();
1829 /* This is used by the `assert' macro. It is provided in libgcc.a,
1830 which `cc' doesn't know how to link. Note that the C++ front-end
1831 no longer actually uses the `assert' macro (instead, it calls
1832 my_friendly_assert). But all of the back-end files still need this. */
/* Fallback for the `assert' macro (see comment above): print the
   failed EXPRESSION, LINE and FILENAME to stderr via the caller's
   format STRING.  NOTE(review): the abort and remaining declarations
   are missing from this listing.  */
1834 __eprintf (string, expression, line, filename)
1837 const char *expression;
1839 const char *filename;
1847 fprintf (stderr, string, expression, line, filename);
1852 /* Return, as an INTEGER_CST node, the number of elements for
1853 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
/* Return, as an INTEGER_CST, the element count of ARRAY_TYPE TYPE's
   top dimension: array_type_nelts (the max index) plus one.
   NOTE(review): the final argument of the build is missing from this
   listing.  */
1856 array_type_nelts_top (type)
1859 return fold (build (PLUS_EXPR, sizetype,
1860 array_type_nelts (type),
1864 /* Return, as an INTEGER_CST node, the number of elements for
1865 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1866 ARRAY_TYPEs that are clumped together. */
/* Return the total element count of TYPE, multiplying together the
   top-level counts of every nested ARRAY_TYPE (see comment above).
   NOTE(review): the final return is missing from this listing.  */
1869 array_type_nelts_total (type)
1872 tree sz = array_type_nelts_top (type);
1873 type = TREE_TYPE (type);
/* Walk inward through clumped array-of-array types, accumulating the
   product of their dimensions.  */
1874 while (TREE_CODE (type) == ARRAY_TYPE)
1876 tree n = array_type_nelts_top (type);
1877 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1878 type = TREE_TYPE (type);
/* Fragment (function header not visible): the mapcar callback used by
   break_out_target_exprs -- leaves side-effect-free non-lists alone
   and rebuilds TARGET_EXPRs through build_cplus_new.  */
1888 if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
1890 else if (TREE_CODE (t) == TARGET_EXPR)
1891 return build_cplus_new (TREE_TYPE (t),
1892 break_out_target_exprs (TREE_OPERAND (t, 1)), 0);
1896 /* Actually, we'll just clean out the target exprs for the moment. */
/* Clean TARGET_EXPRs out of tree T by mapping bot_manip over it (see
   comment above).  */
1898 break_out_target_exprs (t)
1901 return mapcar (t, bot_manip);
/* Fragment (function header not visible): wrap EXPR in an UNSAVE_EXPR
   of the same type, propagating its TREE_SIDE_EFFECTS flag.  */
1910 t = build1 (UNSAVE_EXPR, TREE_TYPE (expr), expr);
1911 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (expr);
1915 /* Modify a tree in place so that all the evaluate only once things
1916 are cleared out. Return the EXPR given. */
/* Walk EXPR in place, clearing everything that marks a subexpression
   as evaluate-once (see comment above): SAVE_EXPR/RTL_EXPR/CALL_EXPR
   RTL fields are reset, reuse of TARGET_EXPR and WITH_CLEANUP_EXPR is
   diagnosed, and operands are recursed into by tree-code class.
   NOTE(review): case labels and returns are missing from this
   listing.  */
1918 unsave_expr_now (expr)
1921 enum tree_code code;
1924 if (expr == NULL_TREE)
1927 code = TREE_CODE (expr);
/* SAVE_EXPR: drop the cached RTL so it is recomputed next time.  */
1931 SAVE_EXPR_RTL (expr) = NULL_RTX;
/* Reusing a TARGET_EXPR cannot be handled yet.  */
1935 sorry ("TARGET_EXPR reused inside UNSAVE_EXPR");
1939 warning ("RTL_EXPR reused inside UNSAVE_EXPR");
1940 RTL_EXPR_SEQUENCE (expr) = NULL_RTX;
/* CALL_EXPR: clear its RTL and unsave each argument in the list.  */
1944 CALL_EXPR_RTL (expr) = NULL_RTX;
1945 if (TREE_OPERAND (expr, 1)
1946 && TREE_CODE (TREE_OPERAND (expr, 1)) == TREE_LIST)
1948 tree exp = TREE_OPERAND (expr, 1);
1951 unsave_expr_now (TREE_VALUE (exp));
1952 exp = TREE_CHAIN (exp);
1957 case WITH_CLEANUP_EXPR:
1958 warning ("WITH_CLEANUP_EXPR reused inside UNSAVE_EXPR");
1959 RTL_EXPR_RTL (expr) = NULL_RTX;
/* Leaf classes carry no operands; expression classes recurse over
   every operand.  */
1963 switch (TREE_CODE_CLASS (code))
1965 case 'c': /* a constant */
1966 case 't': /* a type node */
1967 case 'x': /* something random, like an identifier or an ERROR_MARK. */
1968 case 'd': /* A decl node */
1969 case 'b': /* A block node */
1972 case 'e': /* an expression */
1973 case 'r': /* a reference */
1974 case 's': /* an expression with side effects */
1975 case '<': /* a comparison expression */
1976 case '2': /* a binary arithmetic expression */
1977 case '1': /* a unary arithmetic expression */
1978 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
1979 unsave_expr_now (TREE_OPERAND (expr, i));
1983 my_friendly_abort (999);
1987 /* Since cleanup may have SAVE_EXPRs in it, we protect it with an
1988 UNSAVE_EXPR as the backend cannot yet handle SAVE_EXPRs in cleanups
/* Expand CLEANUP for DECL, first wrapping it in an UNSAVE_EXPR because
   the back end cannot yet handle SAVE_EXPRs inside cleanups (see
   comment above).  NOTE(review): parameter declarations and the
   closing brace lie outside this listing.  */
1991 cp_expand_decl_cleanup (decl, cleanup)
1994 return expand_decl_cleanup (decl, unsave_expr (cleanup));