root/lj_iropt.h

DEFINITIONS

This source file includes the following definitions:
  1. lj_ir_set_
  2. lj_ir_nextins
  3. lj_ir_knum

/*
** Common header for IR emitter and optimizations.
** Copyright (C) 2005-2017 Mike Pall. See Copyright Notice in luajit.h
*/

#ifndef _LJ_IROPT_H
#define _LJ_IROPT_H

#include <stdarg.h>

#include "lj_obj.h"
#include "lj_jit.h"

#if LJ_HASJIT
/* IR emitter. */
LJ_FUNC void LJ_FASTCALL lj_ir_growtop(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_ir_emit(jit_State *J);

/* Save current IR in J->fold.ins, but do not emit it (yet). */
static LJ_AINLINE void lj_ir_set_(jit_State *J, uint16_t ot, IRRef1 a, IRRef1 b)
{
  J->fold.ins.ot = ot; J->fold.ins.op1 = a; J->fold.ins.op2 = b;
}

#define lj_ir_set(J, ot, a, b) \
  lj_ir_set_(J, (uint16_t)(ot), (IRRef1)(a), (IRRef1)(b))

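/* Usage sketch (illustrative helper, not part of the LuaJIT API): lj_ir_set()
** only stages the instruction in J->fold.ins; a separate call then consumes
** it, either by emitting it directly via lj_ir_emit() or, as the trace
** recorder usually does, by routing it through lj_opt_fold() (declared
** further below).
*/
static LJ_AINLINE TRef ir_set_and_emit_sketch(jit_State *J, IROpT ot,
                                              IRRef1 a, IRRef1 b)
{
  lj_ir_set(J, ot, a, b);  /* Stage opcode+type and both operands. */
  return lj_ir_emit(J);    /* Append the staged instruction, return its TRef. */
}
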
/* Get ref of next IR instruction and optionally grow IR.
** Note: this may invalidate all IRIns*!
*/
static LJ_AINLINE IRRef lj_ir_nextins(jit_State *J)
{
  IRRef ref = J->cur.nins;
  if (LJ_UNLIKELY(ref >= J->irtoplim)) lj_ir_growtop(J);
  J->cur.nins = ref + 1;
  return ref;
}

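/* Usage sketch (illustrative, not part of the LuaJIT API): since
** lj_ir_nextins() may call lj_ir_growtop() and reallocate the IR buffer,
** an IRIns pointer must be (re)derived from J->cur.ir only *after* the
** call, mirroring how lj_ir_emit() appends the staged instruction.
*/
static LJ_AINLINE IRIns *ir_append_sketch(jit_State *J, IRIns ins)
{
  IRRef ref = lj_ir_nextins(J);  /* May grow and move the IR buffer. */
  IRIns *ir = &J->cur.ir[ref];   /* Safe: pointer taken after the call. */
  *ir = ins;                     /* Store the new instruction. */
  return ir;
}
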
LJ_FUNC TRef lj_ir_ggfload(jit_State *J, IRType t, uintptr_t ofs);

/* Interning of constants. */
LJ_FUNC TRef LJ_FASTCALL lj_ir_kint(jit_State *J, int32_t k);
LJ_FUNC TRef lj_ir_k64(jit_State *J, IROp op, uint64_t u64);
LJ_FUNC TRef lj_ir_knum_u64(jit_State *J, uint64_t u64);
LJ_FUNC TRef lj_ir_knumint(jit_State *J, lua_Number n);
LJ_FUNC TRef lj_ir_kint64(jit_State *J, uint64_t u64);
LJ_FUNC TRef lj_ir_kgc(jit_State *J, GCobj *o, IRType t);
LJ_FUNC TRef lj_ir_kptr_(jit_State *J, IROp op, void *ptr);
LJ_FUNC TRef lj_ir_knull(jit_State *J, IRType t);
LJ_FUNC TRef lj_ir_kslot(jit_State *J, TRef key, IRRef slot);
LJ_FUNC TRef lj_ir_ktrace(jit_State *J);

#if LJ_64
#define lj_ir_kintp(J, k)       lj_ir_kint64(J, (uint64_t)(k))
#else
#define lj_ir_kintp(J, k)       lj_ir_kint(J, (int32_t)(k))
#endif

static LJ_AINLINE TRef lj_ir_knum(jit_State *J, lua_Number n)
{
  TValue tv;
  tv.n = n;
  return lj_ir_knum_u64(J, tv.u64);
}

#define lj_ir_kstr(J, str)      lj_ir_kgc(J, obj2gco((str)), IRT_STR)
#define lj_ir_ktab(J, tab)      lj_ir_kgc(J, obj2gco((tab)), IRT_TAB)
#define lj_ir_kfunc(J, func)    lj_ir_kgc(J, obj2gco((func)), IRT_FUNC)
#define lj_ir_kptr(J, ptr)      lj_ir_kptr_(J, IR_KPTR, (ptr))
#define lj_ir_kkptr(J, ptr)     lj_ir_kptr_(J, IR_KKPTR, (ptr))

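/* Usage sketch (illustrative, assuming an active jit_State *J): interning
** returns a TRef that can be used directly as an IR operand, and identical
** constants are interned to the same reference, so repeated requests are
** cheap. lj_ir_kkptr() is the variant intended for pointers whose referenced
** contents never change.
*/
static LJ_AINLINE void ir_kconst_sketch(jit_State *J, GCstr *s, void *p)
{
  TRef kone  = lj_ir_kint(J, 1);    /* Integer constant 1. */
  TRef khalf = lj_ir_knum(J, 0.5);  /* FP constant, interned via its u64 bits. */
  TRef kstr  = lj_ir_kstr(J, s);    /* GC object constant (string). */
  TRef kp    = lj_ir_kptr(J, p);    /* Pointer constant (IR_KPTR). */
  UNUSED(kone); UNUSED(khalf); UNUSED(kstr); UNUSED(kp);
}
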
/* Special FP constants. */
#define lj_ir_knum_zero(J)      lj_ir_knum_u64(J, U64x(00000000,00000000))
#define lj_ir_knum_one(J)       lj_ir_knum_u64(J, U64x(3ff00000,00000000))
#define lj_ir_knum_tobit(J)     lj_ir_knum_u64(J, U64x(43380000,00000000))

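/* The constants above are raw IEEE-754 bit patterns: 0x3ff00000_00000000 is
** 1.0 and 0x43380000_00000000 is 2^52+2^51 (6755399441055744.0), the bias
** behind the tobit trick: adding it to a double shifts the integer part into
** the low mantissa bits, so the low 32 bits of the sum's bit pattern hold the
** value as an int32_t, wrapping modulo 2^32. Standalone sketch of that trick
** (assumes round-to-nearest), not LuaJIT code:
*/
static LJ_AINLINE int32_t tobit_bias_sketch(lua_Number n)
{
  TValue tv;
  tv.n = n + 6755399441055744.0;     /* Add the tobit bias (2^52 + 2^51). */
  return (int32_t)(uint32_t)tv.u64;  /* Low 32 bits of the biased double. */
}
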
/* Special 128 bit SIMD constants. */
#define lj_ir_ksimd(J, idx) \
  lj_ir_ggfload(J, IRT_NUM, (uintptr_t)LJ_KSIMD(J, idx) - (uintptr_t)J2GG(J))

/* Access to constants. */
LJ_FUNC void lj_ir_kvalue(lua_State *L, TValue *tv, const IRIns *ir);

/* Convert IR operand types. */
LJ_FUNC TRef LJ_FASTCALL lj_ir_tonumber(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_ir_tonum(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_ir_tostr(jit_State *J, TRef tr);

/* Miscellaneous IR ops. */
LJ_FUNC int lj_ir_numcmp(lua_Number a, lua_Number b, IROp op);
LJ_FUNC int lj_ir_strcmp(GCstr *a, GCstr *b, IROp op);
LJ_FUNC void lj_ir_rollback(jit_State *J, IRRef ref);

/* Emit IR instructions with on-the-fly optimizations. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_fold(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_cse(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_cselim(jit_State *J, IRRef lim);

/* Special return values for the fold functions. */
enum {
  NEXTFOLD,             /* Couldn't fold, pass on. */
  RETRYFOLD,            /* Retry fold with modified fins. */
  KINTFOLD,             /* Return ref for int constant in fins->i. */
  FAILFOLD,             /* Guard would always fail. */
  DROPFOLD,             /* Guard eliminated. */
  MAX_FOLD
};

#define INTFOLD(k)      ((J->fold.ins.i = (k)), (TRef)KINTFOLD)
#define INT64FOLD(k)    (lj_ir_kint64(J, (k)))
#define CONDFOLD(cond)  ((TRef)FAILFOLD + (TRef)(cond))
#define LEFTFOLD        (J->fold.ins.op1)
#define RIGHTFOLD       (J->fold.ins.op2)
#define CSEFOLD         (lj_opt_cse(J))
#define EMITFOLD        (lj_ir_emit(J))

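/* Sketch of how a fold rule uses these codes (hypothetical rule, not taken
** from lj_opt_fold.c): the staged instruction is J->fold.ins and the rule
** either returns a replacement TRef or one of the values above. CONDFOLD(1)
** yields DROPFOLD (guard eliminated), CONDFOLD(0) yields FAILFOLD (guard
** would always fail), and INTFOLD(k) hands back an integer constant.
*/
static TRef LJ_FASTCALL fold_rule_sketch(jit_State *J)
{
  IRIns *fins = &J->fold.ins;
  if (fins->op1 == fins->op2) {  /* Both operands are the same ref. */
    if (fins->o == IR_EQ)
      return CONDFOLD(1);        /* EQ x x is always true: drop the guard. */
    if (fins->o == IR_NE)
      return CONDFOLD(0);        /* NE x x always fails the guard. */
    if (fins->o == IR_BXOR)
      return INTFOLD(0);         /* x xor x ==> integer constant 0. */
  }
  return NEXTFOLD;               /* No match: pass on to the next rule/CSE. */
}
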
/* Load/store forwarding. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_aload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_hload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_uload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_fload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_xload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_tab_len(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_hrefk(jit_State *J);
LJ_FUNC int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J);
LJ_FUNC int LJ_FASTCALL lj_opt_fwd_tptr(jit_State *J, IRRef lim);
LJ_FUNC int lj_opt_fwd_wasnonnil(jit_State *J, IROpT loadop, IRRef xref);

/* Dead-store elimination. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_ahstore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_ustore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_fstore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_xstore(jit_State *J);

/* Narrowing. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_convert(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_index(jit_State *J, TRef key);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_toint(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_tobit(jit_State *J, TRef tr);
#if LJ_HASFFI
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_cindex(jit_State *J, TRef key);
#endif
LJ_FUNC TRef lj_opt_narrow_arith(jit_State *J, TRef rb, TRef rc,
                                 TValue *vb, TValue *vc, IROp op);
LJ_FUNC TRef lj_opt_narrow_unm(jit_State *J, TRef rc, TValue *vc);
LJ_FUNC TRef lj_opt_narrow_mod(jit_State *J, TRef rb, TRef rc, TValue *vb, TValue *vc);
LJ_FUNC TRef lj_opt_narrow_pow(jit_State *J, TRef rb, TRef rc, TValue *vb, TValue *vc);
LJ_FUNC IRType lj_opt_narrow_forl(jit_State *J, cTValue *forbase);

/* Optimization passes. */
LJ_FUNC void lj_opt_dce(jit_State *J);
LJ_FUNC int lj_opt_loop(jit_State *J);
#if LJ_SOFTFP32 || (LJ_32 && LJ_HASFFI)
LJ_FUNC void lj_opt_split(jit_State *J);
#else
#define lj_opt_split(J)         UNUSED(J)
#endif
LJ_FUNC void lj_opt_sink(jit_State *J);

#endif

#endif
