4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License, Version 1.0 only
6 * (the "License"). You may not use this file except in compliance
9 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10 * or http://www.opensolaris.org/os/licensing.
11 * See the License for the specific language governing permissions
12 * and limitations under the License.
14 * When distributing Covered Code, include this CDDL HEADER in each
15 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16 * If applicable, add the following below this CDDL HEADER, with the
17 * fields enclosed by brackets "[]" replaced with your own identifying
18 * information: Portions Copyright [yyyy] [name of copyright owner]
24 * Copyright 2005 Sun Microsystems, Inc. All rights reserved.
25 * Use is subject to license terms.
29 * Copyright (c) 2012 by Delphix. All rights reserved.
32 #include <sys/types.h>
33 #include <sys/sysmacros.h>
34 #include <sys/isa_defs.h>
43 #include <dt_grammar.h>
44 #include <dt_parser.h>
45 #include <dt_provider.h>
47 static void dt_cg_node(dt_node_t *, dt_irlist_t *, dt_regset_t *);
50 dt_cg_node_alloc(uint_t label, dif_instr_t instr)
52 dt_irnode_t *dip = malloc(sizeof (dt_irnode_t));
55 longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);
57 dip->di_label = label;
58 dip->di_instr = instr;
59 dip->di_extern = NULL;
66 * Code generator wrapper function for ctf_member_info. If we are given a
67 * reference to a forward declaration tag, search the entire type space for
68 * the actual definition and then call ctf_member_info on the result.
71 dt_cg_membinfo(ctf_file_t *fp, ctf_id_t type, const char *s, ctf_membinfo_t *mp)
73 while (ctf_type_kind(fp, type) == CTF_K_FORWARD) {
74 char n[DT_TYPE_NAMELEN];
75 dtrace_typeinfo_t dtt;
77 if (ctf_type_name(fp, type, n, sizeof (n)) == NULL ||
78 dt_type_lookup(n, &dtt) == -1 || (
79 dtt.dtt_ctfp == fp && dtt.dtt_type == type))
80 break; /* unable to improve our position */
83 type = ctf_type_resolve(fp, dtt.dtt_type);
86 if (ctf_member_info(fp, type, s, mp) == CTF_ERR)
87 return (NULL); /* ctf_errno is set for us */
93 dt_cg_xsetx(dt_irlist_t *dlp, dt_ident_t *idp, uint_t lbl, int reg, uint64_t x)
95 int flag = idp != NULL ? DT_INT_PRIVATE : DT_INT_SHARED;
96 int intoff = dt_inttab_insert(yypcb->pcb_inttab, x, flag);
97 dif_instr_t instr = DIF_INSTR_SETX((uint_t)intoff, reg);
100 longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);
102 if (intoff > DIF_INTOFF_MAX)
103 longjmp(yypcb->pcb_jmpbuf, EDT_INT2BIG);
105 dt_irlist_append(dlp, dt_cg_node_alloc(lbl, instr));
108 dlp->dl_last->di_extern = idp;
112 dt_cg_setx(dt_irlist_t *dlp, int reg, uint64_t x)
114 dt_cg_xsetx(dlp, NULL, DT_LBL_NONE, reg, x);
118 * When loading bit-fields, we want to convert a byte count in the range
119 * 1-8 to the closest power of 2 (e.g. 3->4, 5->8, etc). The clp2() function
120 * is a clever implementation from "Hacker's Delight" by Henry Warren, Jr.
137 * Lookup the correct load opcode to use for the specified node and CTF type.
138 * We determine the size and convert it to a 3-bit index. Our lookup table
139 * is constructed to use a 5-bit index, consisting of the 3-bit size 0-7, a
140 * bit for the sign, and a bit for userland address. For example, a 4-byte
141 * signed load from userland would be at the following table index:
142 * user=1 sign=1 size=4 => binary index 11011 = decimal index 27
145 dt_cg_load(dt_node_t *dnp, ctf_file_t *ctfp, ctf_id_t type)
147 static const uint_t ops[] = {
148 DIF_OP_LDUB, DIF_OP_LDUH, 0, DIF_OP_LDUW,
150 DIF_OP_LDSB, DIF_OP_LDSH, 0, DIF_OP_LDSW,
152 DIF_OP_ULDUB, DIF_OP_ULDUH, 0, DIF_OP_ULDUW,
153 0, 0, 0, DIF_OP_ULDX,
154 DIF_OP_ULDSB, DIF_OP_ULDSH, 0, DIF_OP_ULDSW,
155 0, 0, 0, DIF_OP_ULDX,
162 * If we're loading a bit-field, the size of our load is found by
163 * rounding cte_bits up to a byte boundary and then finding the
164 * nearest power of two to this value (see clp2(), above).
166 if ((dnp->dn_flags & DT_NF_BITFIELD) &&
167 ctf_type_encoding(ctfp, type, &e) != CTF_ERR)
168 size = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY);
170 size = ctf_type_size(ctfp, type);
172 if (size < 1 || size > 8 || (size & (size - 1)) != 0) {
173 xyerror(D_UNKNOWN, "internal error -- cg cannot load "
174 "size %ld when passed by value\n", (long)size);
177 size--; /* convert size to 3-bit index */
179 if (dnp->dn_flags & DT_NF_SIGNED)
181 if (dnp->dn_flags & DT_NF_USERLAND)
188 dt_cg_ptrsize(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp,
191 ctf_file_t *ctfp = dnp->dn_ctfp;
199 type = ctf_type_resolve(ctfp, dnp->dn_type);
200 kind = ctf_type_kind(ctfp, type);
201 assert(kind == CTF_K_POINTER || kind == CTF_K_ARRAY);
203 if (kind == CTF_K_ARRAY) {
204 if (ctf_array_info(ctfp, type, &r) != 0) {
205 yypcb->pcb_hdl->dt_ctferr = ctf_errno(ctfp);
206 longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
208 type = r.ctr_contents;
210 type = ctf_type_reference(ctfp, type);
212 if ((size = ctf_type_size(ctfp, type)) == 1)
213 return; /* multiply or divide by one can be omitted */
215 sreg = dt_regset_alloc(drp);
216 dt_cg_setx(dlp, sreg, size);
217 instr = DIF_INSTR_FMT(op, dreg, sreg, dreg);
218 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
219 dt_regset_free(drp, sreg);
223 * If the result of a "." or "->" operation is a bit-field, we use this routine
224 * to generate an epilogue to the load instruction that extracts the value. In
225 * the diagrams below the "ld??" is the load instruction that is generated to
226 * load the containing word that is generating prior to calling this function.
228 * Epilogue for unsigned fields: Epilogue for signed fields:
230 * ldu? [r1], r1 lds? [r1], r1
231 * setx USHIFT, r2 setx 64 - SSHIFT, r2
232 * srl r1, r2, r1 sll r1, r2, r1
233 * setx (1 << bits) - 1, r2 setx 64 - bits, r2
234 * and r1, r2, r1 sra r1, r2, r1
236 * The *SHIFT constants above changes value depending on the endian-ness of our
237 * target architecture. Refer to the comments below for more details.
240 dt_cg_field_get(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp,
241 ctf_file_t *fp, const ctf_membinfo_t *mp)
248 if (ctf_type_encoding(fp, mp->ctm_type, &e) != 0 || e.cte_bits > 64) {
249 xyerror(D_UNKNOWN, "cg: bad field: off %lu type <%ld> "
250 "bits %u\n", mp->ctm_offset, mp->ctm_type, e.cte_bits);
253 assert(dnp->dn_op == DT_TOK_PTR || dnp->dn_op == DT_TOK_DOT);
254 r1 = dnp->dn_left->dn_reg;
255 r2 = dt_regset_alloc(drp);
258 * On little-endian architectures, ctm_offset counts from the right so
259 * ctm_offset % NBBY itself is the amount we want to shift right to
260 * move the value bits to the little end of the register to mask them.
261 * On big-endian architectures, ctm_offset counts from the left so we
262 * must subtract (ctm_offset % NBBY + cte_bits) from the size in bits
263 * we used for the load. The size of our load in turn is found by
264 * rounding cte_bits up to a byte boundary and then finding the
265 * nearest power of two to this value (see clp2(), above). These
266 * properties are used to compute shift as USHIFT or SSHIFT, below.
268 if (dnp->dn_flags & DT_NF_SIGNED) {
269 #if BYTE_ORDER == _BIG_ENDIAN
270 shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
271 mp->ctm_offset % NBBY;
273 shift = mp->ctm_offset % NBBY + e.cte_bits;
275 dt_cg_setx(dlp, r2, 64 - shift);
276 instr = DIF_INSTR_FMT(DIF_OP_SLL, r1, r2, r1);
277 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
279 dt_cg_setx(dlp, r2, 64 - e.cte_bits);
280 instr = DIF_INSTR_FMT(DIF_OP_SRA, r1, r2, r1);
281 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
283 #if BYTE_ORDER == _BIG_ENDIAN
284 shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
285 (mp->ctm_offset % NBBY + e.cte_bits);
287 shift = mp->ctm_offset % NBBY;
289 dt_cg_setx(dlp, r2, shift);
290 instr = DIF_INSTR_FMT(DIF_OP_SRL, r1, r2, r1);
291 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
293 dt_cg_setx(dlp, r2, (1ULL << e.cte_bits) - 1);
294 instr = DIF_INSTR_FMT(DIF_OP_AND, r1, r2, r1);
295 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
298 dt_regset_free(drp, r2);
302 * If the destination of a store operation is a bit-field, we use this routine
303 * to generate a prologue to the store instruction that loads the surrounding
304 * bits, clears the destination field, and ORs in the new value of the field.
305 * In the diagram below the "st?" is the store instruction that is generated to
306 * store the containing word that is generating after calling this function.
308 * ld [dst->dn_reg], r1
309 * setx ~(((1 << cte_bits) - 1) << (ctm_offset % NBBY)), r2
312 * setx (1 << cte_bits) - 1, r2
313 * and src->dn_reg, r2, r2
314 * setx ctm_offset % NBBY, r3
318 * st? r1, [dst->dn_reg]
320 * This routine allocates a new register to hold the value to be stored and
321 * returns it. The caller is responsible for freeing this register later.
324 dt_cg_field_set(dt_node_t *src, dt_irlist_t *dlp,
325 dt_regset_t *drp, dt_node_t *dst)
327 uint64_t cmask, fmask, shift;
333 ctf_file_t *fp, *ofp;
336 assert(dst->dn_op == DT_TOK_PTR || dst->dn_op == DT_TOK_DOT);
337 assert(dst->dn_right->dn_kind == DT_NODE_IDENT);
339 fp = dst->dn_left->dn_ctfp;
340 type = ctf_type_resolve(fp, dst->dn_left->dn_type);
342 if (dst->dn_op == DT_TOK_PTR) {
343 type = ctf_type_reference(fp, type);
344 type = ctf_type_resolve(fp, type);
347 if ((fp = dt_cg_membinfo(ofp = fp, type,
348 dst->dn_right->dn_string, &m)) == NULL) {
349 yypcb->pcb_hdl->dt_ctferr = ctf_errno(ofp);
350 longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
353 if (ctf_type_encoding(fp, m.ctm_type, &e) != 0 || e.cte_bits > 64) {
354 xyerror(D_UNKNOWN, "cg: bad field: off %lu type <%ld> "
355 "bits %u\n", m.ctm_offset, m.ctm_type, e.cte_bits);
358 r1 = dt_regset_alloc(drp);
359 r2 = dt_regset_alloc(drp);
360 r3 = dt_regset_alloc(drp);
363 * Compute shifts and masks. We need to compute "shift" as the amount
364 * we need to shift left to position our field in the containing word.
365 * Refer to the comments in dt_cg_field_get(), above, for more info.
366 * We then compute fmask as the mask that truncates the value in the
367 * input register to width cte_bits, and cmask as the mask used to
368 * pass through the containing bits and zero the field bits.
370 #if BYTE_ORDER == _BIG_ENDIAN
371 shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
372 (m.ctm_offset % NBBY + e.cte_bits);
374 shift = m.ctm_offset % NBBY;
376 fmask = (1ULL << e.cte_bits) - 1;
377 cmask = ~(fmask << shift);
379 instr = DIF_INSTR_LOAD(
380 dt_cg_load(dst, fp, m.ctm_type), dst->dn_reg, r1);
381 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
383 dt_cg_setx(dlp, r2, cmask);
384 instr = DIF_INSTR_FMT(DIF_OP_AND, r1, r2, r1);
385 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
387 dt_cg_setx(dlp, r2, fmask);
388 instr = DIF_INSTR_FMT(DIF_OP_AND, src->dn_reg, r2, r2);
389 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
391 dt_cg_setx(dlp, r3, shift);
392 instr = DIF_INSTR_FMT(DIF_OP_SLL, r2, r3, r2);
393 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
395 instr = DIF_INSTR_FMT(DIF_OP_OR, r1, r2, r1);
396 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
398 dt_regset_free(drp, r3);
399 dt_regset_free(drp, r2);
405 dt_cg_store(dt_node_t *src, dt_irlist_t *dlp, dt_regset_t *drp, dt_node_t *dst)
413 * If we're loading a bit-field, the size of our store is found by
414 * rounding dst's cte_bits up to a byte boundary and then finding the
415 * nearest power of two to this value (see clp2(), above).
417 if ((dst->dn_flags & DT_NF_BITFIELD) &&
418 ctf_type_encoding(dst->dn_ctfp, dst->dn_type, &e) != CTF_ERR)
419 size = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY);
421 size = dt_node_type_size(src);
423 if (src->dn_flags & DT_NF_REF) {
424 reg = dt_regset_alloc(drp);
425 dt_cg_setx(dlp, reg, size);
426 instr = DIF_INSTR_COPYS(src->dn_reg, reg, dst->dn_reg);
427 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
428 dt_regset_free(drp, reg);
430 if (dst->dn_flags & DT_NF_BITFIELD)
431 reg = dt_cg_field_set(src, dlp, drp, dst);
437 instr = DIF_INSTR_STORE(DIF_OP_STB, reg, dst->dn_reg);
440 instr = DIF_INSTR_STORE(DIF_OP_STH, reg, dst->dn_reg);
443 instr = DIF_INSTR_STORE(DIF_OP_STW, reg, dst->dn_reg);
446 instr = DIF_INSTR_STORE(DIF_OP_STX, reg, dst->dn_reg);
449 xyerror(D_UNKNOWN, "internal error -- cg cannot store "
450 "size %lu when passed by value\n", (ulong_t)size);
452 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
454 if (dst->dn_flags & DT_NF_BITFIELD)
455 dt_regset_free(drp, reg);
460 * Generate code for a typecast or for argument promotion from the type of the
461 * actual to the type of the formal. We need to generate code for casts when
462 * a scalar type is being narrowed or changing signed-ness. We first shift the
463 * desired bits high (losing excess bits if narrowing) and then shift them down
464 * using logical shift (unsigned result) or arithmetic shift (signed result).
467 dt_cg_typecast(const dt_node_t *src, const dt_node_t *dst,
468 dt_irlist_t *dlp, dt_regset_t *drp)
470 size_t srcsize = dt_node_type_size(src);
471 size_t dstsize = dt_node_type_size(dst);
476 if (!dt_node_is_scalar(dst))
477 return; /* not a scalar */
478 if (dstsize == srcsize &&
479 ((src->dn_flags ^ dst->dn_flags) & DT_NF_SIGNED) != 0)
480 return; /* not narrowing or changing signed-ness */
481 if (dstsize > srcsize && (src->dn_flags & DT_NF_SIGNED) == 0)
482 return; /* nothing to do in this case */
484 rg = dt_regset_alloc(drp);
486 if (dstsize > srcsize) {
487 int n = sizeof (uint64_t) * NBBY - srcsize * NBBY;
488 int s = (dstsize - srcsize) * NBBY;
490 dt_cg_setx(dlp, rg, n);
492 instr = DIF_INSTR_FMT(DIF_OP_SLL, src->dn_reg, rg, dst->dn_reg);
493 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
495 if ((dst->dn_flags & DT_NF_SIGNED) || n == s) {
496 instr = DIF_INSTR_FMT(DIF_OP_SRA,
497 dst->dn_reg, rg, dst->dn_reg);
498 dt_irlist_append(dlp,
499 dt_cg_node_alloc(DT_LBL_NONE, instr));
501 dt_cg_setx(dlp, rg, s);
502 instr = DIF_INSTR_FMT(DIF_OP_SRA,
503 dst->dn_reg, rg, dst->dn_reg);
504 dt_irlist_append(dlp,
505 dt_cg_node_alloc(DT_LBL_NONE, instr));
506 dt_cg_setx(dlp, rg, n - s);
507 instr = DIF_INSTR_FMT(DIF_OP_SRL,
508 dst->dn_reg, rg, dst->dn_reg);
509 dt_irlist_append(dlp,
510 dt_cg_node_alloc(DT_LBL_NONE, instr));
512 } else if (dstsize != sizeof (uint64_t)) {
513 int n = sizeof (uint64_t) * NBBY - dstsize * NBBY;
515 dt_cg_setx(dlp, rg, n);
517 instr = DIF_INSTR_FMT(DIF_OP_SLL, src->dn_reg, rg, dst->dn_reg);
518 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
520 instr = DIF_INSTR_FMT((dst->dn_flags & DT_NF_SIGNED) ?
521 DIF_OP_SRA : DIF_OP_SRL, dst->dn_reg, rg, dst->dn_reg);
522 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
525 dt_regset_free(drp, rg);
529 * Generate code to push the specified argument list on to the tuple stack.
530 * We use this routine for handling subroutine calls and associative arrays.
531 * We must first generate code for all subexpressions before loading the stack
532 * because any subexpression could itself require the use of the tuple stack.
533 * This holds a number of registers equal to the number of arguments, but this
534 * is not a huge problem because the number of arguments can't exceed the
535 * number of tuple register stack elements anyway. At most one extra register
536 * is required (either by dt_cg_typecast() or for dtdt_size, below). This
537 * implies that a DIF implementation should offer a number of general purpose
538 * registers at least one greater than the number of tuple registers.
541 dt_cg_arglist(dt_ident_t *idp, dt_node_t *args,
542 dt_irlist_t *dlp, dt_regset_t *drp)
544 const dt_idsig_t *isp = idp->di_data;
548 for (dnp = args; dnp != NULL; dnp = dnp->dn_list)
549 dt_cg_node(dnp, dlp, drp);
551 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, DIF_INSTR_FLUSHTS));
553 for (dnp = args; dnp != NULL; dnp = dnp->dn_list, i++) {
559 dt_node_diftype(yypcb->pcb_hdl, dnp, &t);
561 isp->dis_args[i].dn_reg = dnp->dn_reg; /* re-use register */
562 dt_cg_typecast(dnp, &isp->dis_args[i], dlp, drp);
563 isp->dis_args[i].dn_reg = -1;
565 if (t.dtdt_flags & DIF_TF_BYREF) {
567 if (t.dtdt_size != 0) {
568 reg = dt_regset_alloc(drp);
569 dt_cg_setx(dlp, reg, t.dtdt_size);
578 instr = DIF_INSTR_PUSHTS(op, t.dtdt_kind, reg, dnp->dn_reg);
579 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
580 dt_regset_free(drp, dnp->dn_reg);
582 if (reg != DIF_REG_R0)
583 dt_regset_free(drp, reg);
586 if (i > yypcb->pcb_hdl->dt_conf.dtc_diftupregs)
587 longjmp(yypcb->pcb_jmpbuf, EDT_NOTUPREG);
591 dt_cg_arithmetic_op(dt_node_t *dnp, dt_irlist_t *dlp,
592 dt_regset_t *drp, uint_t op)
594 int is_ptr_op = (dnp->dn_op == DT_TOK_ADD || dnp->dn_op == DT_TOK_SUB ||
595 dnp->dn_op == DT_TOK_ADD_EQ || dnp->dn_op == DT_TOK_SUB_EQ);
597 int lp_is_ptr = dt_node_is_pointer(dnp->dn_left);
598 int rp_is_ptr = dt_node_is_pointer(dnp->dn_right);
602 if (lp_is_ptr && rp_is_ptr) {
603 assert(dnp->dn_op == DT_TOK_SUB);
607 dt_cg_node(dnp->dn_left, dlp, drp);
608 if (is_ptr_op && rp_is_ptr)
609 dt_cg_ptrsize(dnp, dlp, drp, DIF_OP_MUL, dnp->dn_left->dn_reg);
611 dt_cg_node(dnp->dn_right, dlp, drp);
612 if (is_ptr_op && lp_is_ptr)
613 dt_cg_ptrsize(dnp, dlp, drp, DIF_OP_MUL, dnp->dn_right->dn_reg);
615 instr = DIF_INSTR_FMT(op, dnp->dn_left->dn_reg,
616 dnp->dn_right->dn_reg, dnp->dn_left->dn_reg);
618 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
619 dt_regset_free(drp, dnp->dn_right->dn_reg);
620 dnp->dn_reg = dnp->dn_left->dn_reg;
622 if (lp_is_ptr && rp_is_ptr)
623 dt_cg_ptrsize(dnp->dn_right,
624 dlp, drp, DIF_OP_UDIV, dnp->dn_reg);
628 dt_cg_stvar(const dt_ident_t *idp)
630 static const uint_t aops[] = { DIF_OP_STGAA, DIF_OP_STTAA, DIF_OP_NOP };
631 static const uint_t sops[] = { DIF_OP_STGS, DIF_OP_STTS, DIF_OP_STLS };
633 uint_t i = (((idp->di_flags & DT_IDFLG_LOCAL) != 0) << 1) |
634 ((idp->di_flags & DT_IDFLG_TLS) != 0);
636 return (idp->di_kind == DT_IDENT_ARRAY ? aops[i] : sops[i]);
640 dt_cg_prearith_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp, uint_t op)
642 ctf_file_t *ctfp = dnp->dn_ctfp;
648 if (dt_node_is_pointer(dnp)) {
649 type = ctf_type_resolve(ctfp, dnp->dn_type);
650 assert(ctf_type_kind(ctfp, type) == CTF_K_POINTER);
651 size = ctf_type_size(ctfp, ctf_type_reference(ctfp, type));
654 dt_cg_node(dnp->dn_child, dlp, drp);
655 dnp->dn_reg = dnp->dn_child->dn_reg;
657 reg = dt_regset_alloc(drp);
658 dt_cg_setx(dlp, reg, size);
660 instr = DIF_INSTR_FMT(op, dnp->dn_reg, reg, dnp->dn_reg);
661 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
662 dt_regset_free(drp, reg);
665 * If we are modifying a variable, generate an stv instruction from
666 * the variable specified by the identifier. If we are storing to a
667 * memory address, generate code again for the left-hand side using
668 * DT_NF_REF to get the address, and then generate a store to it.
669 * In both paths, we store the value in dnp->dn_reg (the new value).
671 if (dnp->dn_child->dn_kind == DT_NODE_VAR) {
672 dt_ident_t *idp = dt_ident_resolve(dnp->dn_child->dn_ident);
674 idp->di_flags |= DT_IDFLG_DIFW;
675 instr = DIF_INSTR_STV(dt_cg_stvar(idp),
676 idp->di_id, dnp->dn_reg);
677 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
679 uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;
681 assert(dnp->dn_child->dn_flags & DT_NF_WRITABLE);
682 assert(dnp->dn_child->dn_flags & DT_NF_LVALUE);
684 dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
685 dt_cg_node(dnp->dn_child, dlp, drp);
687 dt_cg_store(dnp, dlp, drp, dnp->dn_child);
688 dt_regset_free(drp, dnp->dn_child->dn_reg);
690 dnp->dn_left->dn_flags &= ~DT_NF_REF;
691 dnp->dn_left->dn_flags |= rbit;
696 dt_cg_postarith_op(dt_node_t *dnp, dt_irlist_t *dlp,
697 dt_regset_t *drp, uint_t op)
699 ctf_file_t *ctfp = dnp->dn_ctfp;
705 if (dt_node_is_pointer(dnp)) {
706 type = ctf_type_resolve(ctfp, dnp->dn_type);
707 assert(ctf_type_kind(ctfp, type) == CTF_K_POINTER);
708 size = ctf_type_size(ctfp, ctf_type_reference(ctfp, type));
711 dt_cg_node(dnp->dn_child, dlp, drp);
712 dnp->dn_reg = dnp->dn_child->dn_reg;
714 nreg = dt_regset_alloc(drp);
715 dt_cg_setx(dlp, nreg, size);
716 instr = DIF_INSTR_FMT(op, dnp->dn_reg, nreg, nreg);
717 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
720 * If we are modifying a variable, generate an stv instruction from
721 * the variable specified by the identifier. If we are storing to a
722 * memory address, generate code again for the left-hand side using
723 * DT_NF_REF to get the address, and then generate a store to it.
724 * In both paths, we store the value from 'nreg' (the new value).
726 if (dnp->dn_child->dn_kind == DT_NODE_VAR) {
727 dt_ident_t *idp = dt_ident_resolve(dnp->dn_child->dn_ident);
729 idp->di_flags |= DT_IDFLG_DIFW;
730 instr = DIF_INSTR_STV(dt_cg_stvar(idp), idp->di_id, nreg);
731 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
733 uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;
734 int oreg = dnp->dn_reg;
736 assert(dnp->dn_child->dn_flags & DT_NF_WRITABLE);
737 assert(dnp->dn_child->dn_flags & DT_NF_LVALUE);
739 dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
740 dt_cg_node(dnp->dn_child, dlp, drp);
743 dt_cg_store(dnp, dlp, drp, dnp->dn_child);
746 dt_regset_free(drp, dnp->dn_child->dn_reg);
747 dnp->dn_left->dn_flags &= ~DT_NF_REF;
748 dnp->dn_left->dn_flags |= rbit;
751 dt_regset_free(drp, nreg);
755 * Determine if we should perform signed or unsigned comparison for an OP2.
756 * If both operands are of arithmetic type, perform the usual arithmetic
757 * conversions to determine the common real type for comparison [ISOC 6.5.8.3].
760 dt_cg_compare_signed(dt_node_t *dnp)
764 if (dt_node_is_string(dnp->dn_left) ||
765 dt_node_is_string(dnp->dn_right))
766 return (1); /* strings always compare signed */
767 else if (!dt_node_is_arith(dnp->dn_left) ||
768 !dt_node_is_arith(dnp->dn_right))
769 return (0); /* non-arithmetic types always compare unsigned */
771 bzero(&dn, sizeof (dn));
772 dt_node_promote(dnp->dn_left, dnp->dn_right, &dn);
773 return (dn.dn_flags & DT_NF_SIGNED);
777 dt_cg_compare_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp, uint_t op)
779 uint_t lbl_true = dt_irlist_label(dlp);
780 uint_t lbl_post = dt_irlist_label(dlp);
785 dt_cg_node(dnp->dn_left, dlp, drp);
786 dt_cg_node(dnp->dn_right, dlp, drp);
788 if (dt_node_is_string(dnp->dn_left) || dt_node_is_string(dnp->dn_right))
793 instr = DIF_INSTR_CMP(opc, dnp->dn_left->dn_reg, dnp->dn_right->dn_reg);
794 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
795 dt_regset_free(drp, dnp->dn_right->dn_reg);
796 dnp->dn_reg = dnp->dn_left->dn_reg;
798 instr = DIF_INSTR_BRANCH(op, lbl_true);
799 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
801 instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
802 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
804 instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
805 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
807 dt_cg_xsetx(dlp, NULL, lbl_true, dnp->dn_reg, 1);
808 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
812 * Code generation for the ternary op requires some trickery with the assembler
813 * in order to conserve registers. We generate code for dn_expr and dn_left
814 * and free their registers so they do not have be consumed across codegen for
815 * dn_right. We insert a dummy MOV at the end of dn_left into the destination
816 * register, which is not yet known because we haven't done dn_right yet, and
817 * save the pointer to this instruction node. We then generate code for
818 * dn_right and use its register as our output. Finally, we reach back and
819 * patch the instruction for dn_left to move its output into this register.
822 dt_cg_ternary_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
824 uint_t lbl_false = dt_irlist_label(dlp);
825 uint_t lbl_post = dt_irlist_label(dlp);
830 dt_cg_node(dnp->dn_expr, dlp, drp);
831 instr = DIF_INSTR_TST(dnp->dn_expr->dn_reg);
832 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
833 dt_regset_free(drp, dnp->dn_expr->dn_reg);
835 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
836 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
838 dt_cg_node(dnp->dn_left, dlp, drp);
839 instr = DIF_INSTR_MOV(dnp->dn_left->dn_reg, DIF_REG_R0);
840 dip = dt_cg_node_alloc(DT_LBL_NONE, instr); /* save dip for below */
841 dt_irlist_append(dlp, dip);
842 dt_regset_free(drp, dnp->dn_left->dn_reg);
844 instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
845 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
847 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, DIF_INSTR_NOP));
848 dt_cg_node(dnp->dn_right, dlp, drp);
849 dnp->dn_reg = dnp->dn_right->dn_reg;
852 * Now that dn_reg is assigned, reach back and patch the correct MOV
853 * instruction into the tail of dn_left. We know dn_reg was unused
854 * at that point because otherwise dn_right couldn't have allocated it.
856 dip->di_instr = DIF_INSTR_MOV(dnp->dn_left->dn_reg, dnp->dn_reg);
857 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
861 dt_cg_logical_and(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
863 uint_t lbl_false = dt_irlist_label(dlp);
864 uint_t lbl_post = dt_irlist_label(dlp);
868 dt_cg_node(dnp->dn_left, dlp, drp);
869 instr = DIF_INSTR_TST(dnp->dn_left->dn_reg);
870 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
871 dt_regset_free(drp, dnp->dn_left->dn_reg);
873 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
874 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
876 dt_cg_node(dnp->dn_right, dlp, drp);
877 instr = DIF_INSTR_TST(dnp->dn_right->dn_reg);
878 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
879 dnp->dn_reg = dnp->dn_right->dn_reg;
881 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
882 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
884 dt_cg_setx(dlp, dnp->dn_reg, 1);
886 instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
887 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
889 instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
890 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, instr));
892 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
896 dt_cg_logical_xor(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
898 uint_t lbl_next = dt_irlist_label(dlp);
899 uint_t lbl_tail = dt_irlist_label(dlp);
903 dt_cg_node(dnp->dn_left, dlp, drp);
904 instr = DIF_INSTR_TST(dnp->dn_left->dn_reg);
905 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
907 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_next);
908 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
909 dt_cg_setx(dlp, dnp->dn_left->dn_reg, 1);
911 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_next, DIF_INSTR_NOP));
912 dt_cg_node(dnp->dn_right, dlp, drp);
914 instr = DIF_INSTR_TST(dnp->dn_right->dn_reg);
915 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
917 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_tail);
918 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
919 dt_cg_setx(dlp, dnp->dn_right->dn_reg, 1);
921 instr = DIF_INSTR_FMT(DIF_OP_XOR, dnp->dn_left->dn_reg,
922 dnp->dn_right->dn_reg, dnp->dn_left->dn_reg);
924 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_tail, instr));
926 dt_regset_free(drp, dnp->dn_right->dn_reg);
927 dnp->dn_reg = dnp->dn_left->dn_reg;
931 dt_cg_logical_or(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
933 uint_t lbl_true = dt_irlist_label(dlp);
934 uint_t lbl_false = dt_irlist_label(dlp);
935 uint_t lbl_post = dt_irlist_label(dlp);
939 dt_cg_node(dnp->dn_left, dlp, drp);
940 instr = DIF_INSTR_TST(dnp->dn_left->dn_reg);
941 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
942 dt_regset_free(drp, dnp->dn_left->dn_reg);
944 instr = DIF_INSTR_BRANCH(DIF_OP_BNE, lbl_true);
945 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
947 dt_cg_node(dnp->dn_right, dlp, drp);
948 instr = DIF_INSTR_TST(dnp->dn_right->dn_reg);
949 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
950 dnp->dn_reg = dnp->dn_right->dn_reg;
952 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
953 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
955 dt_cg_xsetx(dlp, NULL, lbl_true, dnp->dn_reg, 1);
957 instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
958 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
960 instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
961 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, instr));
963 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
967 dt_cg_logical_neg(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
969 uint_t lbl_zero = dt_irlist_label(dlp);
970 uint_t lbl_post = dt_irlist_label(dlp);
974 dt_cg_node(dnp->dn_child, dlp, drp);
975 dnp->dn_reg = dnp->dn_child->dn_reg;
977 instr = DIF_INSTR_TST(dnp->dn_reg);
978 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
980 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_zero);
981 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
983 instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
984 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
986 instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
987 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
989 dt_cg_xsetx(dlp, NULL, lbl_zero, dnp->dn_reg, 1);
990 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
994 dt_cg_asgn_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
1000 * If we are performing a structure assignment of a translated type,
1001 * we must instantiate all members and create a snapshot of the object
1002 * in scratch space. We allocs a chunk of memory, generate code for
1003 * each member, and then set dnp->dn_reg to the scratch object address.
1005 if ((idp = dt_node_resolve(dnp->dn_right, DT_IDENT_XLSOU)) != NULL) {
1007 dt_xlator_t *dxp = idp->di_data;
1008 dt_node_t *mnp, dn, mn;
1012 * Create two fake dt_node_t's representing operator "." and a
1013 * right-hand identifier child node. These will be repeatedly
1014 * modified according to each instantiated member so that we
1015 * can pass them to dt_cg_store() and effect a member store.
1017 bzero(&dn, sizeof (dt_node_t));
1018 dn.dn_kind = DT_NODE_OP2;
1019 dn.dn_op = DT_TOK_DOT;
1023 bzero(&mn, sizeof (dt_node_t));
1024 mn.dn_kind = DT_NODE_IDENT;
1025 mn.dn_op = DT_TOK_IDENT;
1028 * Allocate a register for our scratch data pointer. First we
1029 * set it to the size of our data structure, and then replace
1030 * it with the result of an allocs of the specified size.
1032 r1 = dt_regset_alloc(drp);
1034 ctf_type_size(dxp->dx_dst_ctfp, dxp->dx_dst_base));
1036 instr = DIF_INSTR_ALLOCS(r1, r1);
1037 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1040 * When dt_cg_asgn_op() is called, we have already generated
1041 * code for dnp->dn_right, which is the translator input. We
1042 * now associate this register with the translator's input
1043 * identifier so it can be referenced during our member loop.
1045 dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
1046 dxp->dx_ident->di_id = dnp->dn_right->dn_reg;
1048 for (mnp = dxp->dx_members; mnp != NULL; mnp = mnp->dn_list) {
1050 * Generate code for the translator member expression,
1051 * and then cast the result to the member type.
1053 dt_cg_node(mnp->dn_membexpr, dlp, drp);
1054 mnp->dn_reg = mnp->dn_membexpr->dn_reg;
1055 dt_cg_typecast(mnp->dn_membexpr, mnp, dlp, drp);
1058 * Ask CTF for the offset of the member so we can store
1059 * to the appropriate offset. This call has already
1060 * been done once by the parser, so it should succeed.
1062 if (ctf_member_info(dxp->dx_dst_ctfp, dxp->dx_dst_base,
1063 mnp->dn_membname, &ctm) == CTF_ERR) {
1064 yypcb->pcb_hdl->dt_ctferr =
1065 ctf_errno(dxp->dx_dst_ctfp);
1066 longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
1070 * If the destination member is at offset 0, store the
1071 * result directly to r1 (the scratch buffer address).
1072 * Otherwise allocate another temporary for the offset
1073 * and add r1 to it before storing the result.
1075 if (ctm.ctm_offset != 0) {
1076 r2 = dt_regset_alloc(drp);
1079 * Add the member offset rounded down to the
1080 * nearest byte. If the offset was not aligned
1081 * on a byte boundary, this member is a bit-
1082 * field and dt_cg_store() will handle masking.
1084 dt_cg_setx(dlp, r2, ctm.ctm_offset / NBBY);
1085 instr = DIF_INSTR_FMT(DIF_OP_ADD, r1, r2, r2);
1086 dt_irlist_append(dlp,
1087 dt_cg_node_alloc(DT_LBL_NONE, instr));
1089 dt_node_type_propagate(mnp, &dn);
1090 dn.dn_right->dn_string = mnp->dn_membname;
1093 dt_cg_store(mnp, dlp, drp, &dn);
1094 dt_regset_free(drp, r2);
1097 dt_node_type_propagate(mnp, &dn);
1098 dn.dn_right->dn_string = mnp->dn_membname;
1101 dt_cg_store(mnp, dlp, drp, &dn);
1104 dt_regset_free(drp, mnp->dn_reg);
1107 dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
1108 dxp->dx_ident->di_id = 0;
1110 if (dnp->dn_right->dn_reg != -1)
1111 dt_regset_free(drp, dnp->dn_right->dn_reg);
1113 assert(dnp->dn_reg == dnp->dn_right->dn_reg);
1118 * If we are storing to a variable, generate an stv instruction from
1119 * the variable specified by the identifier. If we are storing to a
1120 * memory address, generate code again for the left-hand side using
1121 * DT_NF_REF to get the address, and then generate a store to it.
1122 * In both paths, we assume dnp->dn_reg already has the new value.
1124 if (dnp->dn_left->dn_kind == DT_NODE_VAR) {
1125 idp = dt_ident_resolve(dnp->dn_left->dn_ident);
1127 if (idp->di_kind == DT_IDENT_ARRAY)
1128 dt_cg_arglist(idp, dnp->dn_left->dn_args, dlp, drp);
1130 idp->di_flags |= DT_IDFLG_DIFW;
1131 instr = DIF_INSTR_STV(dt_cg_stvar(idp),
1132 idp->di_id, dnp->dn_reg);
1133 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1135 uint_t rbit = dnp->dn_left->dn_flags & DT_NF_REF;
1137 assert(dnp->dn_left->dn_flags & DT_NF_WRITABLE);
1138 assert(dnp->dn_left->dn_flags & DT_NF_LVALUE);
1140 dnp->dn_left->dn_flags |= DT_NF_REF; /* force pass-by-ref */
1142 dt_cg_node(dnp->dn_left, dlp, drp);
1143 dt_cg_store(dnp, dlp, drp, dnp->dn_left);
1144 dt_regset_free(drp, dnp->dn_left->dn_reg);
1146 dnp->dn_left->dn_flags &= ~DT_NF_REF;
1147 dnp->dn_left->dn_flags |= rbit;
/*
 * Generate code to load the value of an associative array element into a
 * freshly allocated register (dnp->dn_reg): the key tuple in dnp->dn_args is
 * evaluated onto the DIF tuple stack, then an LDV-family instruction fetches
 * the variable.  Pass-by-reference values get a fault-in epilogue (see the
 * comment below).
 * NOTE(review): this extract is missing several original lines (e.g. the
 * statements selecting 'op' for the TLS vs. global case and the function's
 * closing brace) -- confirm against the complete source.
 */
1152 dt_cg_assoc_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
/* An associative array reference is always a non-local variable with keys. */
1157 assert(dnp->dn_kind == DT_NODE_VAR);
1158 assert(!(dnp->dn_ident->di_flags & DT_IDFLG_LOCAL));
1159 assert(dnp->dn_args != NULL);
/* Evaluate the key expressions and push them on the DIF tuple stack. */
1161 dt_cg_arglist(dnp->dn_ident, dnp->dn_args, dlp, drp);
/* Destination register for the loaded value. */
1163 dnp->dn_reg = dt_regset_alloc(drp);
/* Thread-local vs. global arrays load through different opcodes. */
1165 if (dnp->dn_ident->di_flags & DT_IDFLG_TLS)
/* Record that the generated DIF reads this identifier. */
1170 dnp->dn_ident->di_flags |= DT_IDFLG_DIFR;
1171 instr = DIF_INSTR_LDV(op, dnp->dn_ident->di_id, dnp->dn_reg);
1172 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1175 * If the associative array is a pass-by-reference type, then we are
1176 * loading its value as a pointer to either load or store through it.
1177 * The array element in question may not have been faulted in yet, in
1178 * which case DIF_OP_LD*AA will return zero. We append an epilogue
1179 * of instructions similar to the following:
1181 * ld?aa id, %r1 ! base ld?aa instruction above
1182 * tst %r1 ! start of epilogue
1189 * label: < rest of code >
1191 * The idea is that we allocs a zero-filled chunk of scratch space and
1192 * do a DIF_OP_ST*AA to fault in and initialize the array element, and
1193 * then reload it to get the faulted-in address of the new variable
1194 * storage. This isn't cheap, but pass-by-ref associative array values
1195 * are (thus far) uncommon and the allocs cost only occurs once. If
1196 * this path becomes important to DTrace users, we can improve things
1197 * by adding a new DIF opcode to fault in associative array elements.
1199 if (dnp->dn_flags & DT_NF_REF) {
/* STTAA pairs with LDTAA (thread-local); STGAA with the global load. */
1200 uint_t stvop = op == DIF_OP_LDTAA ? DIF_OP_STTAA : DIF_OP_STGAA;
1201 uint_t label = dt_irlist_label(dlp);
/* Skip the fault-in sequence if the load already returned non-zero. */
1203 instr = DIF_INSTR_TST(dnp->dn_reg);
1204 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1206 instr = DIF_INSTR_BRANCH(DIF_OP_BNE, label);
1207 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
/* Allocate zeroed scratch space the size of the element's type. */
1209 dt_cg_setx(dlp, dnp->dn_reg, dt_node_type_size(dnp));
1210 instr = DIF_INSTR_ALLOCS(dnp->dn_reg, dnp->dn_reg);
1211 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
/* Store the scratch pointer to fault in the element ... */
1213 dnp->dn_ident->di_flags |= DT_IDFLG_DIFW;
1214 instr = DIF_INSTR_STV(stvop, dnp->dn_ident->di_id, dnp->dn_reg);
1215 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
/* ... and reload to obtain the faulted-in storage address. */
1217 instr = DIF_INSTR_LDV(op, dnp->dn_ident->di_id, dnp->dn_reg);
1218 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1220 dt_irlist_append(dlp, dt_cg_node_alloc(label, DIF_INSTR_NOP));
/*
 * Generate code for a built-in array reference (e.g. args[n], uregs[n]):
 * evaluate the constant index, emit an LDA-family load, and for args[]
 * explicitly sign- or zero-extend the result down to the declared type width.
 * NOTE(review): this extract is missing several original lines (variable
 * declarations, the 'op' selection for the TLS case, early-return bodies);
 * confirm against the complete source.
 */
1225 dt_cg_array_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
1227 dt_probe_t *prp = yypcb->pcb_probe;
/* Remember the original index so we can restore it after any remapping. */
1228 uintmax_t saved = dnp->dn_args->dn_value;
1229 dt_ident_t *idp = dnp->dn_ident;
1236 assert(dnp->dn_kind == DT_NODE_VAR);
1237 assert(!(idp->di_flags & DT_IDFLG_LOCAL));
/* Built-in arrays take a single constant integer index. */
1239 assert(dnp->dn_args->dn_kind == DT_NODE_INT);
1240 assert(dnp->dn_args->dn_list == NULL);
1243 * If this is a reference in the args[] array, temporarily modify the
1244 * array index according to the static argument mapping (if any),
1245 * unless the argument reference is provided by a dynamic translator.
1246 * If we're using a dynamic translator for args[], then just set dn_reg
1247 * to an invalid reg and return: DIF_OP_XLARG will fetch the arg later.
1249 if (idp->di_id == DIF_VAR_ARGS) {
1250 if ((idp->di_kind == DT_IDENT_XLPTR ||
1251 idp->di_kind == DT_IDENT_XLSOU) &&
1252 dt_xlator_dynamic(idp->di_data)) {
/* Apply the probe's static argument mapping to the index. */
1256 dnp->dn_args->dn_value = prp->pr_mapping[saved];
/* Generate the index expression, then restore the saved index. */
1259 dt_cg_node(dnp->dn_args, dlp, drp);
1260 dnp->dn_args->dn_value = saved;
/* Reuse the index register as the result register. */
1262 dnp->dn_reg = dnp->dn_args->dn_reg;
1264 if (idp->di_flags & DT_IDFLG_TLS)
1269 idp->di_flags |= DT_IDFLG_DIFR;
1271 instr = DIF_INSTR_LDA(op, idp->di_id,
1272 dnp->dn_args->dn_reg, dnp->dn_reg);
1274 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1277 * If this is a reference to the args[] array, we need to take the
1278 * additional step of explicitly eliminating any bits larger than the
1279 * type size: the DIF interpreter in the kernel will always give us
1280 * the raw (64-bit) argument value, and any bits larger than the type
1281 * size may be junk. As a practical matter, this arises only on 64-bit
1282 * architectures and only when the argument index is larger than the
1283 * number of arguments passed directly to DTrace: if an 8-, 16- or
1284 * 32-bit argument must be retrieved from the stack, it is possible
1285 * (and in some cases, likely) that the upper bits will be garbage.
1287 if (idp->di_id != DIF_VAR_ARGS || !dt_node_is_scalar(dnp))
1290 if ((size = dt_node_type_size(dnp)) == sizeof (uint64_t))
1293 reg = dt_regset_alloc(drp);
1294 assert(size < sizeof (uint64_t));
/* Number of bits to shift out: 64 minus the type width in bits. */
1295 n = sizeof (uint64_t) * NBBY - size * NBBY;
1297 dt_cg_setx(dlp, reg, n);
/* Shift left then arithmetic/logical right to extend per signedness. */
1299 instr = DIF_INSTR_FMT(DIF_OP_SLL, dnp->dn_reg, reg, dnp->dn_reg);
1300 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1302 instr = DIF_INSTR_FMT((dnp->dn_flags & DT_NF_SIGNED) ?
1303 DIF_OP_SRA : DIF_OP_SRL, dnp->dn_reg, reg, dnp->dn_reg);
1305 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1306 dt_regset_free(drp, reg);
/*
1310 * Generate code for an inlined variable reference. Inlines can be used to
1311 * define either scalar or associative array substitutions. For scalars, we
1312 * simply generate code for the parse tree saved in the identifier's din_root,
1313 * and then cast the resulting expression to the inline's declaration type.
1314 * For arrays, we take the input parameter subtrees from dnp->dn_args and
1315 * temporarily store them in the din_root of each din_argv[i] identifier,
1316 * which are themselves inlines and were set up for us by the parser. The
1317 * result is that any reference to the inlined parameter inside the top-level
1318 * din_root will turn into a recursive call to dt_cg_inline() for a scalar
1319 * inline whose din_root will refer to the subtree pointed to by the argument.
 */
/*
 * Generate code for an inline variable by running the code generator over
 * the saved parse tree (see the block comment above this function).
 * NOTE(review): this extract drops several original lines (loop-closing
 * braces, intervening statements); confirm against the complete source.
 */
1322 dt_cg_inline(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
1324 dt_ident_t *idp = dnp->dn_ident;
1325 dt_idnode_t *inp = idp->di_iarg;
1331 assert(idp->di_flags & DT_IDFLG_INLINE);
1332 assert(idp->di_ops == &dt_idops_inline);
/* Array inline: bind each actual argument subtree to its parameter. */
1334 if (idp->di_kind == DT_IDENT_ARRAY) {
1335 for (i = 0, pnp = dnp->dn_args;
1336 pnp != NULL; pnp = pnp->dn_list, i++) {
1337 if (inp->din_argv[i] != NULL) {
1338 pinp = inp->din_argv[i]->di_iarg;
1339 pinp->din_root = pnp;
/* Generate the inline body and cast the result to the declared type. */
1344 dt_cg_node(inp->din_root, dlp, drp);
1345 dnp->dn_reg = inp->din_root->dn_reg;
1346 dt_cg_typecast(inp->din_root, dnp, dlp, drp);
/* Unbind the parameter subtrees so the bindings don't leak to later uses. */
1348 if (idp->di_kind == DT_IDENT_ARRAY) {
1349 for (i = 0; i < inp->din_argc; i++) {
1350 pinp = inp->din_argv[i]->di_iarg;
1351 pinp->din_root = NULL;
/*
 * Special-case code generation for the typeref() subroutine: rewrite two of
 * its dummy arguments in place so that they carry the CTF type name string
 * and the type size (see the "Hack alert" comment below).
 * NOTE(review): this extract drops several original lines (the 'char *p'
 * declaration, the '*p = '\0'' truncation, and the lookup-failure handling);
 * confirm against the complete source.
 */
1357 dt_cg_func_typeref(dtrace_hdl_t *dtp, dt_node_t *dnp)
1359 dtrace_typeinfo_t dtt;
/* The four arguments: address, element count, type string, type size. */
1360 dt_node_t *addr = dnp->dn_args;
1361 dt_node_t *nelm = addr->dn_list;
1362 dt_node_t *strp = nelm->dn_list;
1363 dt_node_t *typs = strp->dn_list;
1364 char buf[DT_TYPE_NAMELEN];
/* Render the address argument's CTF type name into buf. */
1367 ctf_type_name(addr->dn_ctfp, addr->dn_type, buf, sizeof (buf));
1370 * XXX Hack alert! XXX
1371 * The prototype has two dummy args that we munge to represent
1372 * the type string and the type size.
1374 * Yes, I hear your grumble, but it works for now. We'll come
1375 * up with a more elegant implementation later. :-)
/* Replace the dummy string argument with the referenced type's name. */
1377 free(strp->dn_string);
/* Strip the pointer declarator so buf names the referenced type. */
1379 if ((p = strchr(buf, '*')) != NULL)
1382 strp->dn_string = strdup(buf);
1384 if (dtrace_lookup_by_type(dtp, DTRACE_OBJ_EVERY, buf, &dtt) < 0)
/* Replace the dummy size argument with the type's CTF size. */
1387 typs->dn_value = ctf_type_size(dtt.dtt_ctfp, dtt.dtt_type);
/*
 * Callback state handed to dt_cg_xlate_member() through ctf_member_iter()'s
 * opaque argument while expanding a translated type (dt_cg_xlate_expand()).
 */
1390 typedef struct dt_xlmemb {
1391 dt_ident_t *dtxl_idp; /* translated ident */
1392 dt_irlist_t *dtxl_dlp; /* instruction list */
1393 dt_regset_t *dtxl_drp; /* register set */
1394 int dtxl_sreg; /* location of the translation input */
1395 int dtxl_dreg; /* location of our allocated buffer */
/*
 * ctf_member_iter() callback: generate code that evaluates one translator
 * member expression and stores the result at the member's offset within the
 * output buffer allocated by dt_cg_xlate_expand().  Scalars are stored with
 * a sized store, strings with copys, and everything else via a bcopy() call.
 * NOTE(review): this extract drops several original lines (local variable
 * declarations, the early return for untranslated members, switch labels,
 * and the trailing 'return (0)'); confirm against the complete source.
 */
1400 dt_cg_xlate_member(const char *name, ctf_id_t type, ulong_t off, void *arg)
1402 dt_xlmemb_t *dx = arg;
1403 dt_ident_t *idp = dx->dtxl_idp;
1404 dt_irlist_t *dlp = dx->dtxl_dlp;
1405 dt_regset_t *drp = dx->dtxl_drp;
1414 /* Generate code for the translation. */
1416 mnp = dt_xlator_member(dxp, name);
1418 /* If there's no translator for the given member, skip it. */
/* Bind the translator input identifier to the source register. */
1422 dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
1423 dxp->dx_ident->di_id = dx->dtxl_sreg;
1425 dt_cg_node(mnp->dn_membexpr, dlp, drp);
/* Unbind the input identifier now that the member expression is emitted. */
1427 dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
1428 dxp->dx_ident->di_id = 0;
/* treg holds the member expression's value. */
1430 treg = mnp->dn_membexpr->dn_reg;
1432 /* Compute the offset into our buffer and store the result there. */
1433 reg = dt_regset_alloc(drp);
1435 dt_cg_setx(dlp, reg, off / NBBY);
1436 instr = DIF_INSTR_FMT(DIF_OP_ADD, dx->dtxl_dreg, reg, reg);
1437 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1439 size = ctf_type_size(mnp->dn_membexpr->dn_ctfp,
1440 mnp->dn_membexpr->dn_type);
1441 if (dt_node_is_scalar(mnp->dn_membexpr)) {
1443 * Copying scalars is simple.
/* Pick the store opcode by size: byte, half, word, or extended (8). */
1447 instr = DIF_INSTR_STORE(DIF_OP_STB, treg, reg);
1450 instr = DIF_INSTR_STORE(DIF_OP_STH, treg, reg);
1453 instr = DIF_INSTR_STORE(DIF_OP_STW, treg, reg);
1456 instr = DIF_INSTR_STORE(DIF_OP_STX, treg, reg);
1459 xyerror(D_UNKNOWN, "internal error -- unexpected "
1460 "size: %lu\n", (ulong_t)size);
1463 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1465 } else if (dt_node_is_string(mnp->dn_membexpr)) {
1469 * Use the copys instruction for strings.
1471 szreg = dt_regset_alloc(drp);
1472 dt_cg_setx(dlp, szreg, size);
1473 instr = DIF_INSTR_COPYS(treg, szreg, reg);
1474 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1475 dt_regset_free(drp, szreg);
1480 * If it's anything else then we'll just bcopy it.
/* Push (src, dst, size) on the tuple stack and call the bcopy subroutine. */
1482 szreg = dt_regset_alloc(drp);
1483 dt_cg_setx(dlp, szreg, size);
1484 dt_irlist_append(dlp,
1485 dt_cg_node_alloc(DT_LBL_NONE, DIF_INSTR_FLUSHTS));
1486 instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF,
1488 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1489 instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF,
1491 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1492 instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF,
1494 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1495 instr = DIF_INSTR_CALL(DIF_SUBR_BCOPY, szreg);
1496 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1497 dt_regset_free(drp, szreg);
/* Release the offset register and the member value register. */
1500 dt_regset_free(drp, reg);
1501 dt_regset_free(drp, treg);
/*
1507 * If we're expanding a translated type, we create an appropriately sized
1508 * buffer with alloca() and then translate each member into it.
 */
1511 dt_cg_xlate_expand(dt_node_t *dnp, dt_ident_t *idp, dt_irlist_t *dlp,
/* dreg will hold the address of the alloca()'d output buffer. */
1519 dreg = dt_regset_alloc(drp);
1520 size = ctf_type_size(dnp->dn_ident->di_ctfp, dnp->dn_ident->di_type);
1522 /* Call alloca() to create the buffer. */
1523 dt_cg_setx(dlp, dreg, size);
1525 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, DIF_INSTR_FLUSHTS));
1527 instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF, DIF_REG_R0, dreg);
1528 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1530 instr = DIF_INSTR_CALL(DIF_SUBR_ALLOCA, dreg);
1531 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1533 /* Generate the translation for each member. */
/* dn_reg holds the translator input; dreg the destination buffer. */
1537 dlm.dtxl_sreg = dnp->dn_reg;
1538 dlm.dtxl_dreg = dreg;
1539 (void) ctf_member_iter(dnp->dn_ident->di_ctfp,
1540 dnp->dn_ident->di_type, dt_cg_xlate_member,
/*
 * Main recursive code generator: dispatch on the parse-tree node's token
 * (dnp->dn_op) and emit DIF instructions into dlp, leaving the result in
 * dnp->dn_reg allocated from drp.
 * NOTE(review): this extract has lost most of the 'case DT_TOK_*' labels,
 * break statements, and several declarations/else-branches -- the per-case
 * comments below are inferred from the visible calls and should be confirmed
 * against the complete source.
 */
1547 dt_cg_node(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
1549 ctf_file_t *ctfp = dnp->dn_ctfp;
1559 switch (dnp->dn_op) {
/* Comma operator: evaluate left for side effects, result is the right. */
1561 dt_cg_node(dnp->dn_left, dlp, drp);
1562 dt_regset_free(drp, dnp->dn_left->dn_reg);
1563 dt_cg_node(dnp->dn_right, dlp, drp);
1564 dnp->dn_reg = dnp->dn_right->dn_reg;
/* Simple assignment: evaluate RHS, then store via dt_cg_asgn_op(). */
1568 dt_cg_node(dnp->dn_right, dlp, drp);
1569 dnp->dn_reg = dnp->dn_right->dn_reg;
1570 dt_cg_asgn_op(dnp, dlp, drp);
/* Compound assignments: arithmetic op followed by the store. */
1574 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_ADD);
1575 dt_cg_asgn_op(dnp, dlp, drp);
1579 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SUB);
1580 dt_cg_asgn_op(dnp, dlp, drp);
1584 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_MUL);
1585 dt_cg_asgn_op(dnp, dlp, drp);
1589 dt_cg_arithmetic_op(dnp, dlp, drp,
1590 (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SDIV : DIF_OP_UDIV);
1591 dt_cg_asgn_op(dnp, dlp, drp);
1595 dt_cg_arithmetic_op(dnp, dlp, drp,
1596 (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SREM : DIF_OP_UREM);
1597 dt_cg_asgn_op(dnp, dlp, drp);
1601 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_AND);
1602 dt_cg_asgn_op(dnp, dlp, drp);
1606 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_XOR);
1607 dt_cg_asgn_op(dnp, dlp, drp);
1611 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_OR);
1612 dt_cg_asgn_op(dnp, dlp, drp);
1616 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SLL);
1617 dt_cg_asgn_op(dnp, dlp, drp);
1621 dt_cg_arithmetic_op(dnp, dlp, drp,
1622 (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SRA : DIF_OP_SRL);
1623 dt_cg_asgn_op(dnp, dlp, drp);
/* Ternary and short-circuit logical operators. */
1626 case DT_TOK_QUESTION:
1627 dt_cg_ternary_op(dnp, dlp, drp);
1631 dt_cg_logical_or(dnp, dlp, drp);
1635 dt_cg_logical_xor(dnp, dlp, drp);
1639 dt_cg_logical_and(dnp, dlp, drp);
/* Bitwise binary operators. */
1643 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_OR);
1647 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_XOR);
1651 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_AND);
/* Comparisons: branch opcode chosen by operator and signedness. */
1655 dt_cg_compare_op(dnp, dlp, drp, DIF_OP_BE);
1659 dt_cg_compare_op(dnp, dlp, drp, DIF_OP_BNE);
1663 dt_cg_compare_op(dnp, dlp, drp,
1664 dt_cg_compare_signed(dnp) ? DIF_OP_BL : DIF_OP_BLU);
1668 dt_cg_compare_op(dnp, dlp, drp,
1669 dt_cg_compare_signed(dnp) ? DIF_OP_BLE : DIF_OP_BLEU);
1673 dt_cg_compare_op(dnp, dlp, drp,
1674 dt_cg_compare_signed(dnp) ? DIF_OP_BG : DIF_OP_BGU);
1678 dt_cg_compare_op(dnp, dlp, drp,
1679 dt_cg_compare_signed(dnp) ? DIF_OP_BGE : DIF_OP_BGEU);
/* Plain shifts and arithmetic. */
1683 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SLL);
1687 dt_cg_arithmetic_op(dnp, dlp, drp,
1688 (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SRA : DIF_OP_SRL);
1692 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_ADD);
1696 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SUB);
1700 dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_MUL);
1704 dt_cg_arithmetic_op(dnp, dlp, drp,
1705 (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SDIV : DIF_OP_UDIV);
1709 dt_cg_arithmetic_op(dnp, dlp, drp,
1710 (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SREM : DIF_OP_UREM);
/* Logical negation. */
1714 dt_cg_logical_neg(dnp, dlp, drp);
/* Bitwise complement: evaluate child, then NOT in place. */
1718 dt_cg_node(dnp->dn_child, dlp, drp);
1719 dnp->dn_reg = dnp->dn_child->dn_reg;
1720 instr = DIF_INSTR_NOT(dnp->dn_reg, dnp->dn_reg);
1721 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
/* Pre/post increment and decrement. */
1725 dt_cg_prearith_op(dnp, dlp, drp, DIF_OP_ADD);
1728 case DT_TOK_POSTINC:
1729 dt_cg_postarith_op(dnp, dlp, drp, DIF_OP_ADD);
1733 dt_cg_prearith_op(dnp, dlp, drp, DIF_OP_SUB);
1736 case DT_TOK_POSTDEC:
1737 dt_cg_postarith_op(dnp, dlp, drp, DIF_OP_SUB);
/* Unary plus: a no-op beyond evaluating the child. */
1741 dt_cg_node(dnp->dn_child, dlp, drp);
1742 dnp->dn_reg = dnp->dn_child->dn_reg;
/* Unary minus: negate by subtracting from %r0 (always zero). */
1746 dt_cg_node(dnp->dn_child, dlp, drp);
1747 dnp->dn_reg = dnp->dn_child->dn_reg;
1749 instr = DIF_INSTR_FMT(DIF_OP_SUB, DIF_REG_R0,
1750 dnp->dn_reg, dnp->dn_reg);
1752 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
/* Pointer dereference: expand dynamic translators, else load through it. */
1756 dt_cg_node(dnp->dn_child, dlp, drp);
1757 dnp->dn_reg = dnp->dn_child->dn_reg;
1759 if (dt_node_is_dynamic(dnp->dn_child)) {
1761 idp = dt_node_resolve(dnp->dn_child, DT_IDENT_XLPTR);
1762 assert(idp != NULL);
1763 reg = dt_cg_xlate_expand(dnp, idp, dlp, drp);
1765 dt_regset_free(drp, dnp->dn_child->dn_reg);
1768 } else if (!(dnp->dn_flags & DT_NF_REF)) {
1769 uint_t ubit = dnp->dn_flags & DT_NF_USERLAND;
1772 * Save and restore DT_NF_USERLAND across dt_cg_load():
1773 * we need the sign bit from dnp and the user bit from
1774 * dnp->dn_child in order to get the proper opcode.
1777 (dnp->dn_child->dn_flags & DT_NF_USERLAND);
1779 instr = DIF_INSTR_LOAD(dt_cg_load(dnp, ctfp,
1780 dnp->dn_type), dnp->dn_reg, dnp->dn_reg);
1782 dnp->dn_flags &= ~DT_NF_USERLAND;
1783 dnp->dn_flags |= ubit;
1785 dt_irlist_append(dlp,
1786 dt_cg_node_alloc(DT_LBL_NONE, instr));
/* Address-of: force the child to pass-by-ref, then restore its flag. */
1790 case DT_TOK_ADDROF: {
1791 uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;
1793 dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
1794 dt_cg_node(dnp->dn_child, dlp, drp);
1795 dnp->dn_reg = dnp->dn_child->dn_reg;
1797 dnp->dn_child->dn_flags &= ~DT_NF_REF;
1798 dnp->dn_child->dn_flags |= rbit;
/* sizeof: a compile-time constant loaded with setx. */
1802 case DT_TOK_SIZEOF: {
1803 size_t size = dt_node_sizeof(dnp->dn_child);
1804 dnp->dn_reg = dt_regset_alloc(drp);
1806 dt_cg_setx(dlp, dnp->dn_reg, size);
/* stringof: result is the child's value reinterpreted as a string. */
1810 case DT_TOK_STRINGOF:
1811 dt_cg_node(dnp->dn_child, dlp, drp);
1812 dnp->dn_reg = dnp->dn_child->dn_reg;
1817 * An xlate operator appears in either an XLATOR, indicating a
1818 * reference to a dynamic translator, or an OP2, indicating
1819 * use of the xlate operator in the user's program. For the
1820 * dynamic case, generate an xlate opcode with a reference to
1821 * the corresponding member, pre-computed for us in dn_members.
1823 if (dnp->dn_kind == DT_NODE_XLATOR) {
1824 dt_xlator_t *dxp = dnp->dn_xlator;
1826 assert(dxp->dx_ident->di_flags & DT_IDFLG_CGREG);
1827 assert(dxp->dx_ident->di_id != 0);
1829 dnp->dn_reg = dt_regset_alloc(drp);
1831 if (dxp->dx_arg == -1) {
1832 instr = DIF_INSTR_MOV(
1833 dxp->dx_ident->di_id, dnp->dn_reg);
1834 dt_irlist_append(dlp,
1835 dt_cg_node_alloc(DT_LBL_NONE, instr));
1840 instr = DIF_INSTR_XLATE(op, 0, dnp->dn_reg);
1841 dt_irlist_append(dlp,
1842 dt_cg_node_alloc(DT_LBL_NONE, instr));
/* Attach the member so the assembler can fill in the xlate reference. */
1844 dlp->dl_last->di_extern = dnp->dn_xmember;
1848 assert(dnp->dn_kind == DT_NODE_OP2);
1849 dt_cg_node(dnp->dn_right, dlp, drp);
1850 dnp->dn_reg = dnp->dn_right->dn_reg;
/* Cast (LPAR): evaluate the operand and emit a conversion. */
1854 dt_cg_node(dnp->dn_right, dlp, drp);
1855 dnp->dn_reg = dnp->dn_right->dn_reg;
1856 dt_cg_typecast(dnp->dn_right, dnp, dlp, drp);
/* Member access (PTR / DOT): RHS must be a bare identifier. */
1861 assert(dnp->dn_right->dn_kind == DT_NODE_IDENT);
1862 dt_cg_node(dnp->dn_left, dlp, drp);
1865 * If the left-hand side of PTR or DOT is a dynamic variable,
1866 * we expect it to be the output of a D translator. In this
1867 * case, we look up the parse tree corresponding to the member
1868 * that is being accessed and run the code generator over it.
1869 * We then cast the result as if by the assignment operator.
1871 if ((idp = dt_node_resolve(
1872 dnp->dn_left, DT_IDENT_XLSOU)) != NULL ||
1873 (idp = dt_node_resolve(
1874 dnp->dn_left, DT_IDENT_XLPTR)) != NULL) {
1880 mnp = dt_xlator_member(dxp, dnp->dn_right->dn_string);
1881 assert(mnp != NULL);
/* Bind the translator input to the LHS register for the member expr. */
1883 dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
1884 dxp->dx_ident->di_id = dnp->dn_left->dn_reg;
1886 dt_cg_node(mnp->dn_membexpr, dlp, drp);
1887 dnp->dn_reg = mnp->dn_membexpr->dn_reg;
1888 dt_cg_typecast(mnp->dn_membexpr, dnp, dlp, drp);
1890 dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
1891 dxp->dx_ident->di_id = 0;
1893 if (dnp->dn_left->dn_reg != -1)
1894 dt_regset_free(drp, dnp->dn_left->dn_reg);
/* Static member access: resolve the member via CTF. */
1898 ctfp = dnp->dn_left->dn_ctfp;
1899 type = ctf_type_resolve(ctfp, dnp->dn_left->dn_type);
1901 if (dnp->dn_op == DT_TOK_PTR) {
1902 type = ctf_type_reference(ctfp, type);
1903 type = ctf_type_resolve(ctfp, type);
1906 if ((ctfp = dt_cg_membinfo(octfp = ctfp, type,
1907 dnp->dn_right->dn_string, &m)) == NULL) {
1908 yypcb->pcb_hdl->dt_ctferr = ctf_errno(octfp);
1909 longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
/* Add the member's byte offset to the base address if non-zero. */
1912 if (m.ctm_offset != 0) {
1915 reg = dt_regset_alloc(drp);
1918 * If the offset is not aligned on a byte boundary, it
1919 * is a bit-field member and we will extract the value
1920 * bits below after we generate the appropriate load.
1922 dt_cg_setx(dlp, reg, m.ctm_offset / NBBY);
1924 instr = DIF_INSTR_FMT(DIF_OP_ADD,
1925 dnp->dn_left->dn_reg, reg, dnp->dn_left->dn_reg);
1927 dt_irlist_append(dlp,
1928 dt_cg_node_alloc(DT_LBL_NONE, instr));
1929 dt_regset_free(drp, reg);
1932 if (!(dnp->dn_flags & DT_NF_REF)) {
1933 uint_t ubit = dnp->dn_flags & DT_NF_USERLAND;
1936 * Save and restore DT_NF_USERLAND across dt_cg_load():
1937 * we need the sign bit from dnp and the user bit from
1938 * dnp->dn_left in order to get the proper opcode.
1941 (dnp->dn_left->dn_flags & DT_NF_USERLAND);
1943 instr = DIF_INSTR_LOAD(dt_cg_load(dnp,
1944 ctfp, m.ctm_type), dnp->dn_left->dn_reg,
1945 dnp->dn_left->dn_reg);
1947 dnp->dn_flags &= ~DT_NF_USERLAND;
1948 dnp->dn_flags |= ubit;
1950 dt_irlist_append(dlp,
1951 dt_cg_node_alloc(DT_LBL_NONE, instr));
/* Bit-field members need the value bits extracted after the load. */
1953 if (dnp->dn_flags & DT_NF_BITFIELD)
1954 dt_cg_field_get(dnp, dlp, drp, ctfp, &m);
1957 dnp->dn_reg = dnp->dn_left->dn_reg;
/* String constant: intern in the string table and emit a sets. */
1961 dnp->dn_reg = dt_regset_alloc(drp);
1963 assert(dnp->dn_kind == DT_NODE_STRING);
1964 stroff = dt_strtab_insert(yypcb->pcb_strtab, dnp->dn_string);
1967 longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);
1968 if (stroff > DIF_STROFF_MAX)
1969 longjmp(yypcb->pcb_jmpbuf, EDT_STR2BIG);
1971 instr = DIF_INSTR_SETS((ulong_t)stroff, dnp->dn_reg);
1972 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
1977 * If the specified identifier is a variable on which we have
1978 * set the code generator register flag, then this variable
1979 * has already had code generated for it and saved in di_id.
1980 * Allocate a new register and copy the existing value to it.
1982 if (dnp->dn_kind == DT_NODE_VAR &&
1983 (dnp->dn_ident->di_flags & DT_IDFLG_CGREG)) {
1984 dnp->dn_reg = dt_regset_alloc(drp);
1985 instr = DIF_INSTR_MOV(dnp->dn_ident->di_id,
1987 dt_irlist_append(dlp,
1988 dt_cg_node_alloc(DT_LBL_NONE, instr));
1993 * Identifiers can represent function calls, variable refs, or
1994 * symbols. First we check for inlined variables, and handle
1995 * them by generating code for the inline parse tree.
1997 if (dnp->dn_kind == DT_NODE_VAR &&
1998 (dnp->dn_ident->di_flags & DT_IDFLG_INLINE)) {
1999 dt_cg_inline(dnp, dlp, drp);
/* Dispatch on the identifier node's kind: func, var, or symbol. */
2003 switch (dnp->dn_kind) {
2004 case DT_NODE_FUNC: {
2005 dtrace_hdl_t *dtp = yypcb->pcb_hdl;
2007 if ((idp = dnp->dn_ident)->di_kind != DT_IDENT_FUNC) {
2008 dnerror(dnp, D_CG_EXPR, "%s %s( ) may not be "
2009 "called from a D expression (D program "
2010 "context required)\n",
2011 dt_idkind_name(idp->di_kind), idp->di_name);
/* typeref() needs its dummy arguments rewritten before the call. */
2014 switch (idp->di_id) {
2015 case DIF_SUBR_TYPEREF:
2016 dt_cg_func_typeref(dtp, dnp);
/* Push the arguments and emit a subroutine call. */
2023 dt_cg_arglist(dnp->dn_ident, dnp->dn_args, dlp, drp);
2025 dnp->dn_reg = dt_regset_alloc(drp);
2026 instr = DIF_INSTR_CALL(dnp->dn_ident->di_id,
2029 dt_irlist_append(dlp,
2030 dt_cg_node_alloc(DT_LBL_NONE, instr));
/* Translated args[] variable: handled as an array reference. */
2036 if (dnp->dn_ident->di_kind == DT_IDENT_XLSOU ||
2037 dnp->dn_ident->di_kind == DT_IDENT_XLPTR) {
2039 * This can only happen if we have translated
2040 * args[]. See dt_idcook_args() for details.
2042 assert(dnp->dn_ident->di_id == DIF_VAR_ARGS);
2043 dt_cg_array_op(dnp, dlp, drp);
/* Arrays above DIF_VAR_ARRAY_MAX are associative; below are built-in. */
2047 if (dnp->dn_ident->di_kind == DT_IDENT_ARRAY) {
2048 if (dnp->dn_ident->di_id > DIF_VAR_ARRAY_MAX)
2049 dt_cg_assoc_op(dnp, dlp, drp);
2051 dt_cg_array_op(dnp, dlp, drp);
/* Plain scalar variable: LDV with the opcode for its storage class. */
2055 dnp->dn_reg = dt_regset_alloc(drp);
2057 if (dnp->dn_ident->di_flags & DT_IDFLG_LOCAL)
2059 else if (dnp->dn_ident->di_flags & DT_IDFLG_TLS)
2064 dnp->dn_ident->di_flags |= DT_IDFLG_DIFR;
2066 instr = DIF_INSTR_LDV(op,
2067 dnp->dn_ident->di_id, dnp->dn_reg);
2069 dt_irlist_append(dlp,
2070 dt_cg_node_alloc(DT_LBL_NONE, instr));
/* Kernel symbol: look up its address and load it as an immediate. */
2074 dtrace_hdl_t *dtp = yypcb->pcb_hdl;
2075 dtrace_syminfo_t *sip = dnp->dn_ident->di_data;
2078 if (dtrace_lookup_by_name(dtp,
2079 sip->dts_object, sip->dts_name, &sym, NULL) == -1) {
2080 xyerror(D_UNKNOWN, "cg failed for symbol %s`%s:"
2081 " %s\n", sip->dts_object, sip->dts_name,
2082 dtrace_errmsg(dtp, dtrace_errno(dtp)));
2085 dnp->dn_reg = dt_regset_alloc(drp);
2086 dt_cg_xsetx(dlp, dnp->dn_ident,
2087 DT_LBL_NONE, dnp->dn_reg, sym.st_value);
/* Unless pass-by-ref, dereference the symbol's address in place. */
2089 if (!(dnp->dn_flags & DT_NF_REF)) {
2090 instr = DIF_INSTR_LOAD(dt_cg_load(dnp, ctfp,
2091 dnp->dn_type), dnp->dn_reg, dnp->dn_reg);
2092 dt_irlist_append(dlp,
2093 dt_cg_node_alloc(DT_LBL_NONE, instr));
2099 xyerror(D_UNKNOWN, "internal error -- node type %u is "
2100 "not valid for an identifier\n", dnp->dn_kind);
/* Integer constant: load it with setx. */
2105 dnp->dn_reg = dt_regset_alloc(drp);
2106 dt_cg_setx(dlp, dnp->dn_reg, dnp->dn_value);
2110 xyerror(D_UNKNOWN, "internal error -- token type %u is not a "
2111 "valid D compilation token\n", dnp->dn_op);
2116 dt_cg(dt_pcb_t *pcb, dt_node_t *dnp)
2122 if (pcb->pcb_regs == NULL && (pcb->pcb_regs =
2123 dt_regset_create(pcb->pcb_hdl->dt_conf.dtc_difintregs)) == NULL)
2124 longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);
2126 dt_regset_reset(pcb->pcb_regs);
2127 (void) dt_regset_alloc(pcb->pcb_regs); /* allocate %r0 */
2129 if (pcb->pcb_inttab != NULL)
2130 dt_inttab_destroy(pcb->pcb_inttab);
2132 if ((pcb->pcb_inttab = dt_inttab_create(yypcb->pcb_hdl)) == NULL)
2133 longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);
2135 if (pcb->pcb_strtab != NULL)
2136 dt_strtab_destroy(pcb->pcb_strtab);
2138 if ((pcb->pcb_strtab = dt_strtab_create(BUFSIZ)) == NULL)
2139 longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);
2141 dt_irlist_destroy(&pcb->pcb_ir);
2142 dt_irlist_create(&pcb->pcb_ir);
2144 assert(pcb->pcb_dret == NULL);
2145 pcb->pcb_dret = dnp;
2147 if (dt_node_resolve(dnp, DT_IDENT_XLPTR) != NULL) {
2148 dnerror(dnp, D_CG_DYN, "expression cannot evaluate to result "
2149 "of a translated pointer\n");
2153 * If we're generating code for a translator body, assign the input
2154 * parameter to the first available register (i.e. caller passes %r1).
2156 if (dnp->dn_kind == DT_NODE_MEMBER) {
2157 dxp = dnp->dn_membxlator;
2158 dnp = dnp->dn_membexpr;
2160 dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
2161 dxp->dx_ident->di_id = dt_regset_alloc(pcb->pcb_regs);
2164 dt_cg_node(dnp, &pcb->pcb_ir, pcb->pcb_regs);
2166 if ((idp = dt_node_resolve(dnp, DT_IDENT_XLSOU)) != NULL) {
2167 int reg = dt_cg_xlate_expand(dnp, idp,
2168 &pcb->pcb_ir, pcb->pcb_regs);
2169 dt_regset_free(pcb->pcb_regs, dnp->dn_reg);
2173 instr = DIF_INSTR_RET(dnp->dn_reg);
2174 dt_regset_free(pcb->pcb_regs, dnp->dn_reg);
2175 dt_irlist_append(&pcb->pcb_ir, dt_cg_node_alloc(DT_LBL_NONE, instr));
2177 if (dnp->dn_kind == DT_NODE_MEMBER) {
2178 dt_regset_free(pcb->pcb_regs, dxp->dx_ident->di_id);
2179 dxp->dx_ident->di_id = 0;
2180 dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
2183 dt_regset_free(pcb->pcb_regs, 0);
2184 dt_regset_assert_free(pcb->pcb_regs);