userspace/kp_parse.c - ktap

Source code

  1. /*
  2. * ktap parser (source code -> bytecode).
  3. *
  4. * This file is part of ktap by Jovi Zhangwei.
  5. *
  6. * Copyright (C) 2012-2014 Jovi Zhangwei <jovi.zhangwei@gmail.com>.
  7. *
  8. * Adapted from luajit and lua interpreter.
  9. * Copyright (C) 2005-2014 Mike Pall.
  10. * Copyright (C) 1994-2008 Lua.org, PUC-Rio.
  11. *
  12. * ktap is free software; you can redistribute it and/or modify it
  13. * under the terms and conditions of the GNU General Public License,
  14. * version 2, as published by the Free Software Foundation.
  15. *
  16. * ktap is distributed in the hope it will be useful, but WITHOUT
  17. * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  18. * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
  19. * more details.
  20. *
  21. * You should have received a copy of the GNU General Public License along with
  22. * this program; if not, write to the Free Software Foundation, Inc.,
  23. * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
  24. */

  25. #include "../include/ktap_types.h"
  26. #include "../include/ktap_err.h"
  27. #include "kp_util.h"
  28. #include "kp_lex.h"

  29. /* Fixed internal variable names. */
  30. #define VARNAMEDEF(_) \
  31.     _(FOR_IDX, "(for index)") \
  32.     _(FOR_STOP, "(for limit)") \
  33.     _(FOR_STEP, "(for step)") \
  34.     _(FOR_GEN, "(for generator)") \
  35.     _(FOR_STATE, "(for state)") \
  36.     _(FOR_CTL, "(for control)")

  37. enum {
  38.     VARNAME_END,
  39. #define VARNAMEENUM(name, str)  VARNAME_##name,
  40.     VARNAMEDEF(VARNAMEENUM)
  41. #undef VARNAMEENUM
  42.     VARNAME__MAX
  43. };
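
The VARNAMEDEF list above is an X-macro: the same list can be expanded once to build the VARNAME_* enum (as done here) and again elsewhere to build a matching table of the string names. A minimal standalone sketch of the pattern, with made-up names, purely illustrative and not part of kp_parse.c:

/* Standalone sketch of the X-macro pattern used by VARNAMEDEF. */
#include <stdio.h>

#define COLORDEF(_) \
	_(RED,   "red")   \
	_(GREEN, "green") \
	_(BLUE,  "blue")

enum {
	COLOR_END,
#define COLORENUM(name, str)	COLOR_##name,
	COLORDEF(COLORENUM)
#undef COLORENUM
	COLOR__MAX
};

static const char *const color_names[] = {
	"",	/* placeholder for COLOR_END */
#define COLORSTR(name, str)	str,
	COLORDEF(COLORSTR)
#undef COLORSTR
};

int main(void)
{
	printf("%d %s\n", COLOR_GREEN, color_names[COLOR_GREEN]);	/* 2 green */
	return 0;
}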

  44. /* -- Parser structures and definitions ----------------------------------- */

  45. /* Expression kinds. */
  46. typedef enum {
  47.     /* Constant expressions must be first and in this order: */
  48.     VKNIL,
  49.     VKFALSE,
  50.     VKTRUE,
  51.     VKSTR,    /* sval = string value */
  52.     VKNUM,    /* nval = number value */
  53.     VKLAST = VKNUM,
  54.     VKCDATA, /* nval = cdata value, not treated as a constant expression */
  55.     /* Non-constant expressions follow: */
  56.     VLOCAL,    /* info = local register, aux = vstack index */
  57.     VUPVAL,    /* info = upvalue index, aux = vstack index */
  58.     VGLOBAL,    /* sval = string value */
  59.     VINDEXED,    /* info = table register, aux = index reg/byte/string const */
  60.     VJMP,    /* info = instruction PC */
  61.     VRELOCABLE, /* info = instruction PC */
  62.     VNONRELOC, /* info = result register */
  63.     VCALL,    /* info = instruction PC, aux = base */
  64.     VVOID,

  65.     VARGN,
  66.     VARGSTR,
  67.     VARGNAME,
  68.     VPID,
  69.     VTID,
  70.     VUID,
  71.     VCPU,
  72.     VEXECNAME,
  73.     VMAX
  74. } ExpKind;

  75. /* Expression descriptor. */
  76. typedef struct ExpDesc {
  77.     union {
  78.         struct {
  79.             uint32_t info;    /* Primary info. */
  80.             uint32_t aux;    /* Secondary info. */
  81.         } s;
  82.         ktap_val_t nval;    /* Number value. */
  83.         ktap_str_t *sval;    /* String value. */
  84.     } u;
  85.     ExpKind k;
  86.     BCPos t;    /* True condition jump list. */
  87.     BCPos f;    /* False condition jump list. */
  88. } ExpDesc;

  89. /* Macros for expressions. */
  90. #define expr_hasjump(e)        ((e)->t != (e)->f)

  91. #define expr_isk(e)        ((e)->k <= VKLAST)
  92. #define expr_isk_nojump(e)    (expr_isk(e) && !expr_hasjump(e))
  93. #define expr_isnumk(e)        ((e)->k == VKNUM)
  94. #define expr_isnumk_nojump(e)    (expr_isnumk(e) && !expr_hasjump(e))
  95. #define expr_isstrk(e)        ((e)->k == VKSTR)

  96. #define expr_numtv(e)        (&(e)->u.nval)
  97. #define expr_numberV(e)        nvalue(expr_numtv((e)))

  98. /* Initialize expression. */
  99. static inline void expr_init(ExpDesc *e, ExpKind k, uint32_t info)
  100. {
  101.     e->k = k;
  102.     e->u.s.info = info;
  103.     e->f = e->t = NO_JMP;
  104. }

  105. /* Check number constant for +-0. */
  106. static int expr_numiszero(ExpDesc *e)
  107. {
  108.     ktap_val_t *o = expr_numtv(e);
  109.     return (nvalue(o) == 0);
  110. }

  111. /* Per-function linked list of scope blocks. */
  112. typedef struct FuncScope {
  113.     struct FuncScope *prev;    /* Link to outer scope. */
  114.     int vstart;        /* Start of block-local variables. */
  115.     uint8_t nactvar;    /* Number of active vars outside the scope. */
  116.     uint8_t flags;        /* Scope flags. */
  117. } FuncScope;

  118. #define FSCOPE_LOOP        0x01    /* Scope is a (breakable) loop. */
  119. #define FSCOPE_BREAK        0x02    /* Break used in scope. */
  120. #define FSCOPE_GOLA        0x04    /* Goto or label used in scope. */
  121. #define FSCOPE_UPVAL        0x08    /* Upvalue in scope. */
  122. #define FSCOPE_NOCLOSE        0x10    /* Do not close upvalues. */

  123. #define NAME_BREAK        ((ktap_str_t *)(uintptr_t)1)

  124. /* Index into variable stack. */
  125. typedef uint16_t VarIndex;
  126. #define KP_MAX_VSTACK        (65536 - KP_MAX_UPVAL)

  127. /* Variable/goto/label info. */
  128. #define VSTACK_VAR_RW        0x01    /* R/W variable. */
  129. #define VSTACK_GOTO        0x02    /* Pending goto. */
  130. #define VSTACK_LABEL        0x04    /* Label. */

  131. /* Per-function state. */
  132. typedef struct FuncState {
  133.     ktap_tab_t *kt;        /* Hash table for constants. */
  134.     LexState *ls;        /* Lexer state. */
  135.     FuncScope *bl;        /* Current scope. */
  136.     struct FuncState *prev;    /* Enclosing function. */
  137.     BCPos pc;        /* Next bytecode position. */
  138.     BCPos lasttarget;    /* Bytecode position of last jump target. */
  139.     BCPos jpc;        /* Pending jump list to next bytecode. */
  140.     BCReg freereg;        /* First free register. */
  141.     BCReg nactvar;        /* Number of active local variables. */
  142.     BCReg nkn, nkgc;        /* Number of ktap_number/ktap_obj_t constants. */
  143.     BCLine linedefined;    /* First line of the function definition. */
  144.     BCInsLine *bcbase;    /* Base of bytecode stack. */
  145.     BCPos bclim;        /* Limit of bytecode stack. */
  146.     int vbase;        /* Base of variable stack for this function. */
  147.     uint8_t flags;        /* Prototype flags. */
  148.     uint8_t numparams;    /* Number of parameters. */
  149.     uint8_t framesize;    /* Fixed frame size. */
  150.     uint8_t nuv;        /* Number of upvalues */
  151.     VarIndex varmap[KP_MAX_LOCVAR];/* Map from register to variable idx. */
  152.     VarIndex uvmap[KP_MAX_UPVAL];    /* Map from upvalue to variable idx. */
  153.     VarIndex uvtmp[KP_MAX_UPVAL];    /* Temporary upvalue map. */
  154. } FuncState;

  155. /* Binary and unary operators. ORDER OPR */
  156. typedef enum BinOpr {
  157.     OPR_ADD, OPR_SUB, OPR_MUL, OPR_DIV, OPR_MOD, OPR_POW, /* ORDER ARITH */
  158.     OPR_CONCAT,
  159.     OPR_NE, OPR_EQ,
  160.     OPR_LT, OPR_GE, OPR_LE, OPR_GT,
  161.     OPR_AND, OPR_OR,
  162.     OPR_NOBINOPR
  163. } BinOpr;

  164. KP_STATIC_ASSERT((int)BC_ISGE-(int)BC_ISLT == (int)OPR_GE-(int)OPR_LT);
  165. KP_STATIC_ASSERT((int)BC_ISLE-(int)BC_ISLT == (int)OPR_LE-(int)OPR_LT);
  166. KP_STATIC_ASSERT((int)BC_ISGT-(int)BC_ISLT == (int)OPR_GT-(int)OPR_LT);
  167. KP_STATIC_ASSERT((int)BC_SUBVV-(int)BC_ADDVV == (int)OPR_SUB-(int)OPR_ADD);
  168. KP_STATIC_ASSERT((int)BC_MULVV-(int)BC_ADDVV == (int)OPR_MUL-(int)OPR_ADD);
  169. KP_STATIC_ASSERT((int)BC_DIVVV-(int)BC_ADDVV == (int)OPR_DIV-(int)OPR_ADD);
  170. KP_STATIC_ASSERT((int)BC_MODVV-(int)BC_ADDVV == (int)OPR_MOD-(int)OPR_ADD);
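
These static asserts pin the relative ordering of the BinOpr values to the corresponding bytecode opcodes, because the emitters below derive opcodes arithmetically (op = opr-OPR_ADD+BC_ADDVV in bcemit_arith(), op = opr-OPR_LT+BC_ISLT in bcemit_comp()). A standalone sketch of that technique with made-up enums, illustrative only:

/* Standalone sketch (not ktap code): deriving opcodes by enum arithmetic. */
#include <stdio.h>

enum ex_opr { EX_ADD, EX_SUB, EX_MUL };              /* operator order ...   */
enum ex_bc  { EXBC_ADD = 10, EXBC_SUB, EXBC_MUL };   /* ... must match this. */

/* Compile-time guard, same idea as the KP_STATIC_ASSERT lines above. */
typedef char ex_order_check[(EXBC_SUB - EXBC_ADD == EX_SUB - EX_ADD) ? 1 : -1];

static int ex_opcode(enum ex_opr opr)
{
	/* Valid only because both enums keep the same relative order. */
	return (int)opr - EX_ADD + EXBC_ADD;
}

int main(void)
{
	printf("%d %d %d\n", ex_opcode(EX_ADD), ex_opcode(EX_SUB),
	       ex_opcode(EX_MUL));	/* prints: 10 11 12 */
	return 0;
}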

  171. /* -- Error handling ------------------------------------------------------ */

  172. static void err_syntax(LexState *ls, ErrMsg em)
  173. {
  174.     kp_lex_error(ls, ls->tok, em);
  175. }

  176. static void err_token(LexState *ls, LexToken tok)
  177. {
  178.     kp_lex_error(ls, ls->tok, KP_ERR_XTOKEN, kp_lex_token2str(ls, tok));
  179. }

  180. static void err_limit(FuncState *fs, uint32_t limit, const char *what)
  181. {
  182.     if (fs->linedefined == 0)
  183.         kp_lex_error(fs->ls, 0, KP_ERR_XLIMM, limit, what);
  184.     else
  185.         kp_lex_error(fs->ls, 0, KP_ERR_XLIMF, fs->linedefined,
  186.                 limit, what);
  187. }

  188. #define checklimit(fs, v, l, m)        if ((v) >= (l)) err_limit(fs, l, m)
  189. #define checklimitgt(fs, v, l, m)    if ((v) > (l)) err_limit(fs, l, m)
  190. #define checkcond(ls, c, em)        { if (!(c)) err_syntax(ls, em); }

  191. /* -- Management of constants --------------------------------------------- */

  192. /* Return bytecode encoding for primitive constant. */
  193. #define const_pri(e)    ((e)->k)

  194. #define tvhaskslot(o)    (is_number(o))
  195. #define tvkslot(o)    (nvalue(o))

  196. /* Add a number constant. */
  197. static BCReg const_num(FuncState *fs, ExpDesc *e)
  198. {
  199.     ktap_val_t *o;

  200.     kp_assert(expr_isnumk(e));
  201.     o = kp_tab_set(fs->kt, &e->u.nval);
  202.     if (tvhaskslot(o))
  203.         return tvkslot(o);
  204.     set_number(o, fs->nkn);
  205.     return fs->nkn++;
  206. }

  207. /* Add a GC object constant. */
  208. static BCReg const_gc(FuncState *fs, ktap_obj_t *gc, uint32_t itype)
  209. {
  210.     ktap_val_t key, *o;

  211.     setitype(&key, itype);
  212.     key.val.gc = gc;
  213.     o = kp_tab_set(fs->kt, &key);
  214.     if (tvhaskslot(o))
  215.         return tvkslot(o);
  216.     set_number(o, fs->nkgc);
  217.     return fs->nkgc++;
  218. }

  219. /* Add a string constant. */
  220. static BCReg const_str(FuncState *fs, ExpDesc *e)
  221. {
  222.     kp_assert(expr_isstrk(e) || e->k == VGLOBAL);
  223.     return const_gc(fs, obj2gco(e->u.sval), KTAP_TSTR);
  224. }
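
const_num(), const_gc() and const_str() intern constants through fs->kt: the constant itself is the key, the stored value is its slot index, so a repeated constant always resolves to the slot it was first given (numbers and GC objects are counted separately in nkn/nkgc). A standalone sketch of that look-up-or-append idea, using a trivial linear pool instead of a hash table, illustrative only:

/* Standalone sketch (not ktap code): constant interning, look up or append. */
#include <stdio.h>

static double pool[64];
static int npool;

static int intern_num(double v)
{
	int i;

	for (i = 0; i < npool; i++)
		if (pool[i] == v)
			return i;	/* already interned, reuse its slot */
	pool[npool] = v;
	return npool++;			/* new constant slot */
}

int main(void)
{
	printf("%d %d %d\n", intern_num(3), intern_num(7), intern_num(3));
	return 0;	/* prints: 0 1 0 */
}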

  225. /* Anchor string constant. */
  226. ktap_str_t *kp_parse_keepstr(LexState *ls, const char *str, size_t len)
  227. {
  228.     ktap_val_t v;
  229.     ktap_str_t *s = kp_str_new(str, len);

  230.     set_string(&v, s);
  231.     ktap_val_t *tv = kp_tab_set(ls->fs->kt, &v);
  232.     if (is_nil(tv))
  233.         set_bool(tv, 1);
  234.     return s;
  235. }

  236. /* -- Jump list handling -------------------------------------------------- */

  237. /* Get next element in jump list. */
  238. static BCPos jmp_next(FuncState *fs, BCPos pc)
  239. {
  240.     ptrdiff_t delta = bc_j(fs->bcbase[pc].ins);
  241.     if ((BCPos)delta == NO_JMP)
  242.         return NO_JMP;
  243.     else
  244.         return (BCPos)(((ptrdiff_t)pc+1)+delta);
  245. }

  246. /* Check if any of the instructions on the jump list produce no value. */
  247. static int jmp_novalue(FuncState *fs, BCPos list)
  248. {
  249.     for (; list != NO_JMP; list = jmp_next(fs, list)) {
  250.         BCIns p = fs->bcbase[list >= 1 ? list-1 : list].ins;
  251.         if (!(bc_op(p) == BC_ISTC || bc_op(p) == BC_ISFC ||
  252.             bc_a(p) == NO_REG))
  253.             return 1;
  254.     }
  255.     return 0;
  256. }

  257. /* Patch register of test instructions. */
  258. static int jmp_patchtestreg(FuncState *fs, BCPos pc, BCReg reg)
  259. {
  260.     BCInsLine *ilp = &fs->bcbase[pc >= 1 ? pc-1 : pc];
  261.     BCOp op = bc_op(ilp->ins);

  262.     if (op == BC_ISTC || op == BC_ISFC) {
  263.         if (reg != NO_REG && reg != bc_d(ilp->ins)) {
  264.             setbc_a(&ilp->ins, reg);
  265.         } else { /* Nothing to store or already in the right register. */
  266.             setbc_op(&ilp->ins, op+(BC_IST-BC_ISTC));
  267.             setbc_a(&ilp->ins, 0);
  268.         }
  269.     } else if (bc_a(ilp->ins) == NO_REG) {
  270.         if (reg == NO_REG) {
  271.             ilp->ins =
  272.                 BCINS_AJ(BC_JMP, bc_a(fs->bcbase[pc].ins), 0);
  273.         } else {
  274.             setbc_a(&ilp->ins, reg);
  275.             if (reg >= bc_a(ilp[1].ins))
  276.                 setbc_a(&ilp[1].ins, reg+1);
  277.         }
  278.     } else {
  279.         return 0; /* Cannot patch other instructions. */
  280.     }
  281.     return 1;
  282. }

  283. /* Drop values for all instructions on jump list. */
  284. static void jmp_dropval(FuncState *fs, BCPos list)
  285. {
  286.     for (; list != NO_JMP; list = jmp_next(fs, list))
  287.         jmp_patchtestreg(fs, list, NO_REG);
  288. }

  289. /* Patch jump instruction to target. */
  290. static void jmp_patchins(FuncState *fs, BCPos pc, BCPos dest)
  291. {
  292.     BCIns *jmp = &fs->bcbase[pc].ins;
  293.     BCPos offset = dest-(pc+1)+BCBIAS_J;

  294.     kp_assert(dest != NO_JMP);
  295.     if (offset > BCMAX_D)
  296.         err_syntax(fs->ls, KP_ERR_XJUMP);
  297.     setbc_d(jmp, offset);
  298. }

  299. /* Append to jump list. */
  300. static void jmp_append(FuncState *fs, BCPos *l1, BCPos l2)
  301. {
  302.     if (l2 == NO_JMP) {
  303.         return;
  304.     } else if (*l1 == NO_JMP) {
  305.         *l1 = l2;
  306.     } else {
  307.         BCPos list = *l1;
  308.         BCPos next;
  309.         /* Find last element. */
  310.         while ((next = jmp_next(fs, list)) != NO_JMP)
  311.             list = next;
  312.         jmp_patchins(fs, list, l2);
  313.     }
  314. }

  315. /* Patch jump list and preserve produced values. */
  316. static void jmp_patchval(FuncState *fs, BCPos list, BCPos vtarget,
  317.              BCReg reg, BCPos dtarget)
  318. {
  319.     while (list != NO_JMP) {
  320.         BCPos next = jmp_next(fs, list);
  321.         if (jmp_patchtestreg(fs, list, reg)) {
  322.             /* Jump to target with value. */
  323.             jmp_patchins(fs, list, vtarget);
  324.         } else {
  325.             /* Jump to default target. */
  326.             jmp_patchins(fs, list, dtarget);
  327.         }
  328.         list = next;
  329.     }
  330. }

  331. /* Jump to following instruction. Append to list of pending jumps. */
  332. static void jmp_tohere(FuncState *fs, BCPos list)
  333. {
  334.     fs->lasttarget = fs->pc;
  335.     jmp_append(fs, &fs->jpc, list);
  336. }

  337. /* Patch jump list to target. */
  338. static void jmp_patch(FuncState *fs, BCPos list, BCPos target)
  339. {
  340.     if (target == fs->pc) {
  341.         jmp_tohere(fs, list);
  342.     } else {
  343.         kp_assert(target < fs->pc);
  344.         jmp_patchval(fs, list, target, NO_REG, target);
  345.     }
  346. }
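
The helpers above thread a singly linked list of pending jumps through the jump offset fields themselves: an unpatched instruction's offset holds the position of the previous pending jump (or NO_JMP at the end of the chain), jmp_append() walks to the tail to splice lists, and jmp_patchins() finally overwrites the field with the real biased offset. A standalone sketch of the idea, with a plain int array standing in for the bytecode, illustrative only:

/* Standalone sketch (not ktap code): a pending-jump list threaded through
 * the instructions' own offset fields. */
#include <stdio.h>

#define EX_NO_JMP (-1)

static int offs[16];		/* offset field of each emitted jump */
static int head = EX_NO_JMP;	/* head of the pending-jump list     */

static void emit_pending_jump(int pc)
{
	offs[pc] = head;	/* chain to the previous pending jump */
	head = pc;
}

static void patch_all(int target)
{
	while (head != EX_NO_JMP) {
		int next = offs[head];
		offs[head] = target - (head + 1);	/* real relative offset */
		head = next;
	}
}

int main(void)
{
	emit_pending_jump(0);
	emit_pending_jump(3);
	emit_pending_jump(7);
	patch_all(10);
	printf("%d %d %d\n", offs[0], offs[3], offs[7]);	/* 9 6 2 */
	return 0;
}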

  347. /* -- Bytecode register allocator ----------------------------------------- */

  348. /* Bump frame size. */
  349. static void bcreg_bump(FuncState *fs, BCReg n)
  350. {
  351.     BCReg sz = fs->freereg + n;

  352.     if (sz > fs->framesize) {
  353.         if (sz >= KP_MAX_SLOTS)
  354.             err_syntax(fs->ls, KP_ERR_XSLOTS);
  355.         fs->framesize = (uint8_t)sz;
  356.     }
  357. }

  358. /* Reserve registers. */
  359. static void bcreg_reserve(FuncState *fs, BCReg n)
  360. {
  361.     bcreg_bump(fs, n);
  362.     fs->freereg += n;
  363. }

  364. /* Free register. */
  365. static void bcreg_free(FuncState *fs, BCReg reg)
  366. {
  367.     if (reg >= fs->nactvar) {
  368.         fs->freereg--;
  369.         kp_assert(reg == fs->freereg);
  370.     }
  371. }

  372. /* Free register for expression. */
  373. static void expr_free(FuncState *fs, ExpDesc *e)
  374. {
  375.     if (e->k == VNONRELOC)
  376.         bcreg_free(fs, e->u.s.info);
  377. }

  378. /* -- Bytecode emitter ---------------------------------------------------- */

  379. /* Emit bytecode instruction. */
  380. static BCPos bcemit_INS(FuncState *fs, BCIns ins)
  381. {
  382.     BCPos pc = fs->pc;
  383.     LexState *ls = fs->ls;

  384.     jmp_patchval(fs, fs->jpc, pc, NO_REG, pc);
  385.     fs->jpc = NO_JMP;
  386.     if (pc >= fs->bclim) {
  387.         ptrdiff_t base = fs->bcbase - ls->bcstack;
  388.         checklimit(fs, ls->sizebcstack, KP_MAX_BCINS,
  389.                 "bytecode instructions");
  390.         if (!ls->bcstack) {
  391.             ls->bcstack = malloc(sizeof(BCInsLine) * 20);
  392.             ls->sizebcstack = 20;
  393.         } else {
  394.             ls->bcstack = realloc(ls->bcstack,
  395.                 ls->sizebcstack * sizeof(BCInsLine) * 2);
  396.             ls->sizebcstack = ls->sizebcstack * 2;
  397.         }
  398.         fs->bclim = (BCPos)(ls->sizebcstack - base);
  399.         fs->bcbase = ls->bcstack + base;
  400.     }
  401.     fs->bcbase[pc].ins = ins;
  402.     fs->bcbase[pc].line = ls->lastline;
  403.     fs->pc = pc+1;
  404.     return pc;
  405. }

  406. #define bcemit_ABC(fs, o, a, b, c)    bcemit_INS(fs, BCINS_ABC(o, a, b, c))
  407. #define bcemit_AD(fs, o, a, d)        bcemit_INS(fs, BCINS_AD(o, a, d))
  408. #define bcemit_AJ(fs, o, a, j)        bcemit_INS(fs, BCINS_AJ(o, a, j))

  409. #define bcptr(fs, e)            (&(fs)->bcbase[(e)->u.s.info].ins)
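
bcemit_INS() grows the per-chunk bytecode buffer on demand: it starts at 20 BCInsLine entries, doubles the allocation on overflow, and then recomputes fs->bcbase and fs->bclim because realloc() may have moved the array (note that, as written, the listing does not check the realloc() return value). A standalone sketch of that grow-by-doubling pattern, with made-up names, illustrative only:

/* Standalone sketch (not ktap code): grow-by-doubling append buffer. */
#include <stdio.h>
#include <stdlib.h>

struct ex_buf { int *items; size_t size, used; };

static void ex_push(struct ex_buf *b, int v)
{
	if (b->used >= b->size) {
		size_t nsize = b->size ? b->size * 2 : 20;
		int *p = realloc(b->items, nsize * sizeof(*p));

		if (!p) {	/* unlike the listing, check for failure */
			perror("realloc");
			exit(1);
		}
		b->items = p;
		b->size = nsize;
	}
	b->items[b->used++] = v;
}

int main(void)
{
	struct ex_buf b = { NULL, 0, 0 };
	int i;

	for (i = 0; i < 100; i++)
		ex_push(&b, i);
	printf("size=%zu used=%zu last=%d\n", b.size, b.used, b.items[99]);
	free(b.items);
	return 0;	/* prints: size=160 used=100 last=99 */
}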

  410. /* -- Bytecode emitter for expressions ------------------------------------ */

  411. /* Discharge non-constant expression to any register. */
  412. static void expr_discharge(FuncState *fs, ExpDesc *e)
  413. {
  414.     BCIns ins;

  415.     if (e->k == VUPVAL) {
  416.         ins = BCINS_AD(BC_UGET, 0, e->u.s.info);
  417.     } else if (e->k == VGLOBAL) {
  418.         ins = BCINS_AD(BC_GGET, 0, const_str(fs, e));
  419.     } else if (e->k == VINDEXED) {
  420.         BCReg rc = e->u.s.aux;
  421.         if ((int32_t)rc < 0) {
  422.             ins = BCINS_ABC(BC_TGETS, 0, e->u.s.info, ~rc);
  423.         } else if (rc > BCMAX_C) {
  424.             ins = BCINS_ABC(BC_TGETB, 0, e->u.s.info,
  425.                     rc-(BCMAX_C+1));
  426.         } else {
  427.             bcreg_free(fs, rc);
  428.             ins = BCINS_ABC(BC_TGETV, 0, e->u.s.info, rc);
  429.         }
  430.         bcreg_free(fs, e->u.s.info);
  431.     } else if (e->k == VCALL) {
  432.         e->u.s.info = e->u.s.aux;
  433.         e->k = VNONRELOC;
  434.         return;
  435.     } else if (e->k == VLOCAL) {
  436.         e->k = VNONRELOC;
  437.         return;
  438.     } else {
  439.         return;
  440.     }

  441.     e->u.s.info = bcemit_INS(fs, ins);
  442.     e->k = VRELOCABLE;
  443. }

  444. /* Emit bytecode to set a range of registers to nil. */
  445. static void bcemit_nil(FuncState *fs, BCReg from, BCReg n)
  446. {
  447.     if (fs->pc > fs->lasttarget) {  /* No jumps to current position? */
  448.         BCIns *ip = &fs->bcbase[fs->pc-1].ins;
  449.         BCReg pto, pfrom = bc_a(*ip);
  450.         /* Try to merge with the previous instruction. */
  451.         switch (bc_op(*ip)) {
  452.         case BC_KPRI:
  453.             if (bc_d(*ip) != ~KTAP_TNIL) break;
  454.             if (from == pfrom) {
  455.                 if (n == 1)
  456.                     return;
  457.             } else if (from == pfrom+1) {
  458.                 from = pfrom;
  459.                 n++;
  460.             } else {
  461.                 break;
  462.             }
  463.             /* Replace KPRI. */
  464.             *ip = BCINS_AD(BC_KNIL, from, from+n-1);
  465.             return;
  466.         case BC_KNIL:
  467.             pto = bc_d(*ip);
  468.             /* Can we connect both ranges? */
  469.             if (pfrom <= from && from <= pto+1) {
  470.                 if (from+n-1 > pto) {
  471.                     /* Patch previous instruction range. */
  472.                     setbc_d(ip, from+n-1);
  473.                 }
  474.                 return;
  475.             }
  476.             break;
  477.         default:
  478.             break;
  479.         }
  480.     }

  481.     /* Emit new instruction or replace old instruction. */
  482.     bcemit_INS(fs, n == 1 ? BCINS_AD(BC_KPRI, from, VKNIL) :
  483.                 BCINS_AD(BC_KNIL, from, from+n-1));
  484. }

  485. /* Discharge an expression to a specific register. Ignore branches. */
  486. static void expr_toreg_nobranch(FuncState *fs, ExpDesc *e, BCReg reg)
  487. {
  488.     BCIns ins;

  489.     expr_discharge(fs, e);
  490.     if (e->k == VKSTR) {
  491.         ins = BCINS_AD(BC_KSTR, reg, const_str(fs, e));
  492.     } else if (e->k == VKNUM) {
  493.         ktap_number n = expr_numberV(e);
  494.         if (n >= 0 && n <= 0xffff) {
  495.             ins = BCINS_AD(BC_KSHORT, reg, (BCReg)(uint16_t)n);
  496.         } else
  497.             ins = BCINS_AD(BC_KNUM, reg, const_num(fs, e));
  498.     } else if (e->k == VRELOCABLE) {
  499.         setbc_a(bcptr(fs, e), reg);
  500.         goto noins;
  501.     } else if (e->k == VNONRELOC) {
  502.         if (reg == e->u.s.info)
  503.             goto noins;
  504.         ins = BCINS_AD(BC_MOV, reg, e->u.s.info);
  505.     } else if (e->k == VKNIL) {
  506.         bcemit_nil(fs, reg, 1);
  507.         goto noins;
  508.     } else if (e->k <= VKTRUE) {
  509.         ins = BCINS_AD(BC_KPRI, reg, const_pri(e));
  510.     } else if (e->k == VARGN) {
  511.         ins = BCINS_AD(BC_VARGN, reg, e->u.s.info);
  512.     } else if (e->k > VARGN && e->k < VMAX) {
  513.         ins = BCINS_AD(e->k - VARGN + BC_VARGN, reg, 0);
  514.     } else {
  515.         kp_assert(e->k == VVOID || e->k == VJMP);
  516.         return;
  517.     }
  518.     bcemit_INS(fs, ins);
  519. noins:
  520.     e->u.s.info = reg;
  521.     e->k = VNONRELOC;
  522. }

  523. /* Forward declaration. */
  524. static BCPos bcemit_jmp(FuncState *fs);

  525. /* Discharge an expression to a specific register. */
  526. static void expr_toreg(FuncState *fs, ExpDesc *e, BCReg reg)
  527. {
  528.     expr_toreg_nobranch(fs, e, reg);
  529.     if (e->k == VJMP) {
  530.         /* Add it to the true jump list. */
  531.         jmp_append(fs, &e->t, e->u.s.info);
  532.     }
  533.     if (expr_hasjump(e)) {  /* Discharge expression with branches. */
  534.         BCPos jend, jfalse = NO_JMP, jtrue = NO_JMP;
  535.         if (jmp_novalue(fs, e->t) || jmp_novalue(fs, e->f)) {
  536.             BCPos jval = (e->k == VJMP) ? NO_JMP : bcemit_jmp(fs);
  537.             jfalse = bcemit_AD(fs, BC_KPRI, reg, VKFALSE);
  538.             bcemit_AJ(fs, BC_JMP, fs->freereg, 1);
  539.             jtrue = bcemit_AD(fs, BC_KPRI, reg, VKTRUE);
  540.             jmp_tohere(fs, jval);
  541.         }
  542.         jend = fs->pc;
  543.         fs->lasttarget = jend;
  544.         jmp_patchval(fs, e->f, jend, reg, jfalse);
  545.         jmp_patchval(fs, e->t, jend, reg, jtrue);
  546.     }
  547.     e->f = e->t = NO_JMP;
  548.     e->u.s.info = reg;
  549.     e->k = VNONRELOC;
  550. }

  551. /* Discharge an expression to the next free register. */
  552. static void expr_tonextreg(FuncState *fs, ExpDesc *e)
  553. {
  554.     expr_discharge(fs, e);
  555.     expr_free(fs, e);
  556.     bcreg_reserve(fs, 1);
  557.     expr_toreg(fs, e, fs->freereg - 1);
  558. }

  559. /* Discharge an expression to any register. */
  560. static BCReg expr_toanyreg(FuncState *fs, ExpDesc *e)
  561. {
  562.     expr_discharge(fs, e);
  563.     if (e->k == VNONRELOC) {
  564.         if (!expr_hasjump(e))
  565.             return e->u.s.info;  /* Already in a register. */
  566.         if (e->u.s.info >= fs->nactvar) {
  567.             /* Discharge to temp. register. */
  568.             expr_toreg(fs, e, e->u.s.info);
  569.             return e->u.s.info;
  570.         }
  571.     }
  572.     expr_tonextreg(fs, e);  /* Discharge to next register. */
  573.     return e->u.s.info;
  574. }

  575. /* Partially discharge expression to a value. */
  576. static void expr_toval(FuncState *fs, ExpDesc *e)
  577. {
  578.     if (expr_hasjump(e))
  579.         expr_toanyreg(fs, e);
  580.     else
  581.         expr_discharge(fs, e);
  582. }

  583. /* Emit store for LHS expression. */
  584. static void bcemit_store(FuncState *fs, ExpDesc *var, ExpDesc *e)
  585. {
  586.     BCIns ins;

  587.     if (var->k == VLOCAL) {
  588.         fs->ls->vstack[var->u.s.aux].info |= VSTACK_VAR_RW;
  589.         expr_free(fs, e);
  590.         expr_toreg(fs, e, var->u.s.info);
  591.         return;
  592.     } else if (var->k == VUPVAL) {
  593.         fs->ls->vstack[var->u.s.aux].info |= VSTACK_VAR_RW;
  594.         expr_toval(fs, e);
  595.         if (e->k <= VKTRUE)
  596.             ins = BCINS_AD(BC_USETP, var->u.s.info, const_pri(e));
  597.         else if (e->k == VKSTR)
  598.             ins = BCINS_AD(BC_USETS, var->u.s.info,
  599.                     const_str(fs, e));
  600.         else if (e->k == VKNUM)
  601.             ins = BCINS_AD(BC_USETN, var->u.s.info,
  602.                     const_num(fs, e));
  603.         else
  604.             ins = BCINS_AD(BC_USETV, var->u.s.info,
  605.                     expr_toanyreg(fs, e));
  606.     } else if (var->k == VGLOBAL) {
  607.         BCReg ra = expr_toanyreg(fs, e);
  608.         ins = BCINS_AD(BC_GSET, ra, const_str(fs, var));
  609.     } else {
  610.         BCReg ra, rc;
  611.         kp_assert(var->k == VINDEXED);
  612.         ra = expr_toanyreg(fs, e);
  613.         rc = var->u.s.aux;
  614.         if ((int32_t)rc < 0) {
  615.             ins = BCINS_ABC(BC_TSETS, ra, var->u.s.info, ~rc);
  616.         } else if (rc > BCMAX_C) {
  617.             ins = BCINS_ABC(BC_TSETB, ra, var->u.s.info,
  618.                 rc-(BCMAX_C+1));
  619.         } else {
  620.             /*
  621.              * Free late alloced key reg to avoid assert on
  622.              * free of value reg. This can only happen when
  623.              * called from expr_table().
  624.              */
  625.             kp_assert(e->k != VNONRELOC || ra < fs->nactvar ||
  626.                     rc < ra || (bcreg_free(fs, rc),1));
  627.             ins = BCINS_ABC(BC_TSETV, ra, var->u.s.info, rc);
  628.         }
  629.     }
  630.     bcemit_INS(fs, ins);
  631.     expr_free(fs, e);
  632. }

  633. /* Emit store for '+=' expression. */
  634. static void bcemit_store_incr(FuncState *fs, ExpDesc *var, ExpDesc *e)
  635. {
  636.     BCIns ins;

  637.     if (var->k == VLOCAL) {
  638.         /* No need to support "var a = 0; a += 1"; just write 'a = a + 1'. */
  639.         err_syntax(fs->ls, KP_ERR_XSYMBOL);
  640.         return;
  641.     } else if (var->k == VUPVAL) {
  642.         fs->ls->vstack[var->u.s.aux].info |= VSTACK_VAR_RW;
  643.         expr_toval(fs, e);
  644.         if (e->k == VKNUM) {
  645.             ins = BCINS_AD(BC_UINCN, var->u.s.info,
  646.                     const_num(fs, e));
  647.         } else if (e->k <= VKTRUE || e->k == VKSTR) {
  648.             err_syntax(fs->ls, KP_ERR_XSYMBOL);
  649.             return;
  650.         } else
  651.             ins = BCINS_AD(BC_UINCV, var->u.s.info,
  652.                     expr_toanyreg(fs, e));
  653.     } else if (var->k == VGLOBAL) {
  654.         BCReg ra = expr_toanyreg(fs, e);
  655.         ins = BCINS_AD(BC_GINC, ra, const_str(fs, var));
  656.     } else {
  657.         BCReg ra, rc;
  658.         kp_assert(var->k == VINDEXED);
  659.         ra = expr_toanyreg(fs, e);
  660.         rc = var->u.s.aux;
  661.         if ((int32_t)rc < 0) {
  662.             ins = BCINS_ABC(BC_TINCS, ra, var->u.s.info, ~rc);
  663.         } else if (rc > BCMAX_C) {
  664.             ins = BCINS_ABC(BC_TINCB, ra, var->u.s.info,
  665.                 rc-(BCMAX_C+1));
  666.         } else {
  667.             /*
  668.              * Free late alloced key reg to avoid assert on
  669.              * free of value reg. This can only happen when
  670.              * called from expr_table().
  671.              */
  672.             kp_assert(e->k != VNONRELOC || ra < fs->nactvar ||
  673.                     rc < ra || (bcreg_free(fs, rc),1));
  674.             ins = BCINS_ABC(BC_TINCV, ra, var->u.s.info, rc);
  675.         }
  676.     }
  677.     bcemit_INS(fs, ins);
  678.     expr_free(fs, e);
  679. }


  680. /* Emit method lookup expression. */
  681. static void bcemit_method(FuncState *fs, ExpDesc *e, ExpDesc *key)
  682. {
  683.     BCReg idx, func, obj = expr_toanyreg(fs, e);

  684.     expr_free(fs, e);
  685.     func = fs->freereg;
  686.     bcemit_AD(fs, BC_MOV, func+1, obj); /* Copy object to first argument. */
  687.     kp_assert(expr_isstrk(key));
  688.     idx = const_str(fs, key);
  689.     if (idx <= BCMAX_C) {
  690.         bcreg_reserve(fs, 2);
  691.         bcemit_ABC(fs, BC_TGETS, func, obj, idx);
  692.     } else {
  693.         bcreg_reserve(fs, 3);
  694.         bcemit_AD(fs, BC_KSTR, func+2, idx);
  695.         bcemit_ABC(fs, BC_TGETV, func, obj, func+2);
  696.         fs->freereg--;
  697.     }
  698.     e->u.s.info = func;
  699.     e->k = VNONRELOC;
  700. }

  701. /* -- Bytecode emitter for branches --------------------------------------- */

  702. /* Emit unconditional branch. */
  703. static BCPos bcemit_jmp(FuncState *fs)
  704. {
  705.     BCPos jpc = fs->jpc;
  706.     BCPos j = fs->pc - 1;
  707.     BCIns *ip = &fs->bcbase[j].ins;

  708.     fs->jpc = NO_JMP;
  709.     if ((int32_t)j >= (int32_t)fs->lasttarget && bc_op(*ip) == BC_UCLO)
  710.         setbc_j(ip, NO_JMP);
  711.     else
  712.         j = bcemit_AJ(fs, BC_JMP, fs->freereg, NO_JMP);
  713.     jmp_append(fs, &j, jpc);
  714.     return j;
  715. }

  716. /* Invert branch condition of bytecode instruction. */
  717. static void invertcond(FuncState *fs, ExpDesc *e)
  718. {
  719.     BCIns *ip = &fs->bcbase[e->u.s.info - 1].ins;
  720.     setbc_op(ip, bc_op(*ip)^1);
  721. }

  722. /* Emit conditional branch. */
  723. static BCPos bcemit_branch(FuncState *fs, ExpDesc *e, int cond)
  724. {
  725.     BCPos pc;

  726.     if (e->k == VRELOCABLE) {
  727.         BCIns *ip = bcptr(fs, e);
  728.         if (bc_op(*ip) == BC_NOT) {
  729.             *ip = BCINS_AD(cond ? BC_ISF : BC_IST, 0, bc_d(*ip));
  730.             return bcemit_jmp(fs);
  731.         }
  732.     }
  733.     if (e->k != VNONRELOC) {
  734.         bcreg_reserve(fs, 1);
  735.         expr_toreg_nobranch(fs, e, fs->freereg-1);
  736.     }
  737.     bcemit_AD(fs, cond ? BC_ISTC : BC_ISFC, NO_REG, e->u.s.info);
  738.     pc = bcemit_jmp(fs);
  739.     expr_free(fs, e);
  740.     return pc;
  741. }

  742. /* Emit branch on true condition. */
  743. static void bcemit_branch_t(FuncState *fs, ExpDesc *e)
  744. {
  745.     BCPos pc;

  746.     expr_discharge(fs, e);
  747.     if (e->k == VKSTR || e->k == VKNUM || e->k == VKTRUE)
  748.         pc = NO_JMP; /* Never jump. */
  749.     else if (e->k == VJMP)
  750.         invertcond(fs, e), pc = e->u.s.info;
  751.     else if (e->k == VKFALSE || e->k == VKNIL)
  752.         expr_toreg_nobranch(fs, e, NO_REG), pc = bcemit_jmp(fs);
  753.     else
  754.         pc = bcemit_branch(fs, e, 0);
  755.     jmp_append(fs, &e->f, pc);
  756.     jmp_tohere(fs, e->t);
  757.     e->t = NO_JMP;
  758. }

  759. /* Emit branch on false condition. */
  760. static void bcemit_branch_f(FuncState *fs, ExpDesc *e)
  761. {
  762.     BCPos pc;

  763.     expr_discharge(fs, e);
  764.     if (e->k == VKNIL || e->k == VKFALSE)
  765.         pc = NO_JMP; /* Never jump. */
  766.     else if (e->k == VJMP)
  767.         pc = e->u.s.info;
  768.     else if (e->k == VKSTR || e->k == VKNUM || e->k == VKTRUE)
  769.         expr_toreg_nobranch(fs, e, NO_REG), pc = bcemit_jmp(fs);
  770.     else
  771.         pc = bcemit_branch(fs, e, 1);
  772.     jmp_append(fs, &e->t, pc);
  773.     jmp_tohere(fs, e->f);
  774.     e->f = NO_JMP;
  775. }

  776. /* -- Bytecode emitter for operators -------------------------------------- */

  777. static ktap_number number_foldarith(ktap_number x, ktap_number y, int op)
  778. {
  779.     switch (op) {
  780.     case OPR_ADD - OPR_ADD: return x + y;
  781.     case OPR_SUB - OPR_ADD: return x - y;
  782.     case OPR_MUL - OPR_ADD: return x * y;
  783.     case OPR_DIV - OPR_ADD: return x / y;
  784.     default: return x;
  785.     }
  786. }

  787. /* Try constant-folding of arithmetic operators. */
  788. static int foldarith(BinOpr opr, ExpDesc *e1, ExpDesc *e2)
  789. {
  790.     ktap_val_t o;
  791.     ktap_number n;

  792.     if (!expr_isnumk_nojump(e1) || !expr_isnumk_nojump(e2))
  793.         return 0;

  794.     if (opr == OPR_DIV && expr_numberV(e2) == 0)
  795.         return 0; /* do not attempt to divide by 0 */

  796.     if (opr == OPR_MOD || opr == OPR_POW)
  797.         return 0; /* ktap currently does not constant-fold mod or pow */

  798.     n = number_foldarith(expr_numberV(e1), expr_numberV(e2),
  799.                 (int)opr-OPR_ADD);
  800.     set_number(&o, n);
  801.     set_number(&e1->u.nval, n);
  802.     return 1;
  803. }

  804. /* Emit arithmetic operator. */
  805. static void bcemit_arith(FuncState *fs, BinOpr opr, ExpDesc *e1, ExpDesc *e2)
  806. {
  807.     BCReg rb, rc, t;
  808.     uint32_t op;

  809.     if (foldarith(opr, e1, e2))
  810.         return;
  811.     if (opr == OPR_POW) {
  812.         op = BC_POW;
  813.         rc = expr_toanyreg(fs, e2);
  814.         rb = expr_toanyreg(fs, e1);
  815.     } else {
  816.         op = opr-OPR_ADD+BC_ADDVV;
  817.         /*
  818.          * Must discharge 2nd operand first since VINDEXED
  819.          * might free regs.
  820.          */
  821.         expr_toval(fs, e2);
  822.         if (expr_isnumk(e2) && (rc = const_num(fs, e2)) <= BCMAX_C)
  823.             op -= BC_ADDVV-BC_ADDVN;
  824.         else
  825.             rc = expr_toanyreg(fs, e2);
  826.         /* 1st operand discharged by bcemit_binop_left,
  827.          * but need KNUM/KSHORT. */
  828.         kp_assert(expr_isnumk(e1) || e1->k == VNONRELOC);
  829.         expr_toval(fs, e1);
  830.         /* Avoid two consts to satisfy bytecode constraints. */
  831.         if (expr_isnumk(e1) && !expr_isnumk(e2) &&
  832.             (t = const_num(fs, e1)) <= BCMAX_B) {
  833.             rb = rc; rc = t; op -= BC_ADDVV-BC_ADDNV;
  834.         } else {
  835.             rb = expr_toanyreg(fs, e1);
  836.         }
  837.     }
  838.     /* Using expr_free might cause asserts if the order is wrong. */
  839.     if (e1->k == VNONRELOC && e1->u.s.info >= fs->nactvar)
  840.         fs->freereg--;
  841.     if (e2->k == VNONRELOC && e2->u.s.info >= fs->nactvar)
  842.         fs->freereg--;
  843.     e1->u.s.info = bcemit_ABC(fs, op, 0, rb, rc);
  844.     e1->k = VRELOCABLE;
  845. }

  846. /* Emit comparison operator. */
  847. static void bcemit_comp(FuncState *fs, BinOpr opr, ExpDesc *e1, ExpDesc *e2)
  848. {
  849.     ExpDesc *eret = e1;
  850.     BCIns ins;

  851.     expr_toval(fs, e1);
  852.     if (opr == OPR_EQ || opr == OPR_NE) {
  853.         BCOp op = opr == OPR_EQ ? BC_ISEQV : BC_ISNEV;
  854.         BCReg ra;

  855.         if (expr_isk(e1)) { /* Need constant in 2nd arg. */
  856.             e1 = e2;
  857.             e2 = eret;
  858.         }
  859.         ra = expr_toanyreg(fs, e1);  /* First arg must be in a reg. */
  860.         expr_toval(fs, e2);
  861.         switch (e2->k) {
  862.         case VKNIL: case VKFALSE: case VKTRUE:
  863.             ins = BCINS_AD(op+(BC_ISEQP-BC_ISEQV), ra,
  864.                     const_pri(e2));
  865.             break;
  866.         case VKSTR:
  867.             ins = BCINS_AD(op+(BC_ISEQS-BC_ISEQV), ra,
  868.                     const_str(fs, e2));
  869.             break;
  870.         case VKNUM:
  871.             ins = BCINS_AD(op+(BC_ISEQN-BC_ISEQV), ra,
  872.                     const_num(fs, e2));
  873.             break;
  874.         default:
  875.             ins = BCINS_AD(op, ra, expr_toanyreg(fs, e2));
  876.             break;
  877.         }
  878.     } else {
  879.         uint32_t op = opr-OPR_LT+BC_ISLT;
  880.         BCReg ra, rd;
  881.         if ((op-BC_ISLT) & 1) {  /* GT -> LT, GE -> LE */
  882.             e1 = e2; e2 = eret;  /* Swap operands. */
  883.             op = ((op-BC_ISLT)^3)+BC_ISLT;
  884.             expr_toval(fs, e1);
  885.         }
  886.         rd = expr_toanyreg(fs, e2);
  887.         ra = expr_toanyreg(fs, e1);
  888.         ins = BCINS_AD(op, ra, rd);
  889.     }
  890.     /* Using expr_free might cause asserts if the order is wrong. */
  891.     if (e1->k == VNONRELOC && e1->u.s.info >= fs->nactvar)
  892.         fs->freereg--;
  893.     if (e2->k == VNONRELOC && e2->u.s.info >= fs->nactvar)
  894.         fs->freereg--;
  895.     bcemit_INS(fs, ins);
  896.     eret->u.s.info = bcemit_jmp(fs);
  897.     eret->k = VJMP;
  898. }
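
For the ordered comparisons, bcemit_comp() rewrites GT and GE by swapping the operands and flipping the opcode with ((op-BC_ISLT)^3)+BC_ISLT; the XOR trick works because the asserted order is LT, GE, LE, GT, so the odd offsets (GE, GT) map to LE and LT respectively. A standalone sketch of that remapping, with a made-up enum in the same relative order, illustrative only:

/* Standalone sketch (not ktap code): the GE->LE / GT->LT remapping. */
#include <stdio.h>

enum { EX_ISLT, EX_ISGE, EX_ISLE, EX_ISGT };	/* same relative order */

int main(void)
{
	static const char *const name[] = { "ISLT", "ISGE", "ISLE", "ISGT" };
	int op;

	for (op = EX_ISLT; op <= EX_ISGT; op++)
		if (op & 1)	/* GE or GT: swap operands, flip the opcode */
			printf("%s -> %s with swapped operands\n",
			       name[op], name[op ^ 3]);
	return 0;	/* prints: ISGE -> ISLE ..., ISGT -> ISLT ... */
}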

  899. /* Fixup left side of binary operator. */
  900. static void bcemit_binop_left(FuncState *fs, BinOpr op, ExpDesc *e)
  901. {
  902.     if (op == OPR_AND) {
  903.         bcemit_branch_t(fs, e);
  904.     } else if (op == OPR_OR) {
  905.         bcemit_branch_f(fs, e);
  906.     } else if (op == OPR_CONCAT) {
  907.         expr_tonextreg(fs, e);
  908.     } else if (op == OPR_EQ || op == OPR_NE) {
  909.         if (!expr_isk_nojump(e))
  910.             expr_toanyreg(fs, e);
  911.     } else {
  912.         if (!expr_isnumk_nojump(e))
  913.             expr_toanyreg(fs, e);
  914.     }
  915. }

  916. /* Emit binary operator. */
  917. static void bcemit_binop(FuncState *fs, BinOpr op, ExpDesc *e1, ExpDesc *e2)
  918. {
  919.     if (op <= OPR_POW) {
  920.         bcemit_arith(fs, op, e1, e2);
  921.     } else if (op == OPR_AND) {
  922.         kp_assert(e1->t == NO_JMP);  /* List must be closed. */
  923.         expr_discharge(fs, e2);
  924.         jmp_append(fs, &e2->f, e1->f);
  925.         *e1 = *e2;
  926.     } else if (op == OPR_OR) {
  927.         kp_assert(e1->f == NO_JMP);  /* List must be closed. */
  928.         expr_discharge(fs, e2);
  929.         jmp_append(fs, &e2->t, e1->t);
  930.         *e1 = *e2;
  931.     } else if (op == OPR_CONCAT) {
  932.         expr_toval(fs, e2);
  933.         if (e2->k == VRELOCABLE && bc_op(*bcptr(fs, e2)) == BC_CAT) {
  934.             kp_assert(e1->u.s.info == bc_b(*bcptr(fs, e2))-1);
  935.             expr_free(fs, e1);
  936.             setbc_b(bcptr(fs, e2), e1->u.s.info);
  937.             e1->u.s.info = e2->u.s.info;
  938.         } else {
  939.             expr_tonextreg(fs, e2);
  940.             expr_free(fs, e2);
  941.             expr_free(fs, e1);
  942.             e1->u.s.info = bcemit_ABC(fs, BC_CAT, 0, e1->u.s.info,
  943.                                  e2->u.s.info);
  944.         }
  945.         e1->k = VRELOCABLE;
  946.     } else {
  947.         kp_assert(op == OPR_NE || op == OPR_EQ || op == OPR_LT ||
  948.               op == OPR_GE || op == OPR_LE || op == OPR_GT);
  949.         bcemit_comp(fs, op, e1, e2);
  950.     }
  951. }

  952. /* Emit unary operator. */
  953. static void bcemit_unop(FuncState *fs, BCOp op, ExpDesc *e)
  954. {
  955.     if (op == BC_NOT) {
  956.         /* Swap true and false lists. */
  957.         { BCPos temp = e->f; e->f = e->t; e->t = temp; }
  958.         jmp_dropval(fs, e->f);
  959.         jmp_dropval(fs, e->t);
  960.         expr_discharge(fs, e);
  961.         if (e->k == VKNIL || e->k == VKFALSE) {
  962.             e->k = VKTRUE;
  963.             return;
  964.         } else if (expr_isk(e)) {
  965.             e->k = VKFALSE;
  966.             return;
  967.         } else if (e->k == VJMP) {
  968.             invertcond(fs, e);
  969.             return;
  970.         } else if (e->k == VRELOCABLE) {
  971.             bcreg_reserve(fs, 1);
  972.             setbc_a(bcptr(fs, e), fs->freereg-1);
  973.             e->u.s.info = fs->freereg-1;
  974.             e->k = VNONRELOC;
  975.         } else {
  976.             kp_assert(e->k == VNONRELOC);
  977.         }
  978.     } else {
  979.         kp_assert(op == BC_UNM || op == BC_LEN);
  980.         /* Constant-fold negations. */
  981.         if (op == BC_UNM && !expr_hasjump(e)) {
  982.             /* Avoid folding to -0. */
  983.             if (expr_isnumk(e) && !expr_numiszero(e)) {
  984.                 ktap_val_t *o = expr_numtv(e);
  985.                 if (is_number(o))
  986.                     set_number(o, -nvalue(o));
  987.                 return;
  988.             }
  989.         }
  990.         expr_toanyreg(fs, e);
  991.     }
  992.     expr_free(fs, e);
  993.     e->u.s.info = bcemit_AD(fs, op, 0, e->u.s.info);
  994.     e->k = VRELOCABLE;
  995. }

  996. /* -- Lexer support ------------------------------------------------------- */

  997. /* Check and consume optional token. */
  998. static int lex_opt(LexState *ls, LexToken tok)
  999. {
  1000.     if (ls->tok == tok) {
  1001.         kp_lex_next(ls);
  1002.         return 1;
  1003.     }
  1004.     return 0;
  1005. }

  1006. /* Check and consume token. */
  1007. static void lex_check(LexState *ls, LexToken tok)
  1008. {
  1009.     if (ls->tok != tok)
  1010.         err_token(ls, tok);
  1011.     kp_lex_next(ls);
  1012. }

  1013. /* Check for matching token. */
  1014. static void lex_match(LexState *ls, LexToken what, LexToken who, BCLine line)
  1015. {
  1016.     if (!lex_opt(ls, what)) {
  1017.         if (line == ls->linenumber) {
  1018.             err_token(ls, what);
  1019.         } else {
  1020.             const char *swhat = kp_lex_token2str(ls, what);
  1021.             const char *swho = kp_lex_token2str(ls, who);
  1022.             kp_lex_error(ls, ls->tok, KP_ERR_XMATCH, swhat, swho,
  1023.                                 line);
  1024.         }
  1025.     }
  1026. }

  1027. /* Check for string token. */
  1028. static ktap_str_t *lex_str(LexState *ls)
  1029. {
  1030.     ktap_str_t *s;

  1031.     if (ls->tok != TK_name)
  1032.         err_token(ls, TK_name);
  1033.     s = rawtsvalue(&ls->tokval);
  1034.     kp_lex_next(ls);
  1035.     return s;
  1036. }

  1037. /* -- Variable handling --------------------------------------------------- */

  1038. #define var_get(ls, fs, i)    ((ls)->vstack[(fs)->varmap[(i)]])

  1039. /* Define a new local variable. */
  1040. static void var_new(LexState *ls, BCReg n, ktap_str_t *name)
  1041. {
  1042.     FuncState *fs = ls->fs;
  1043.     int vtop = ls->vtop;

  1044.     checklimit(fs, fs->nactvar+n, KP_MAX_LOCVAR, "local variables");
  1045.     if (vtop >= ls->sizevstack) {
  1046.         if (ls->sizevstack >= KP_MAX_VSTACK)
  1047.             kp_lex_error(ls, 0, KP_ERR_XLIMC, KP_MAX_VSTACK);
  1048.         if (!ls->vstack) {
  1049.             ls->vstack = malloc(sizeof(VarInfo) * 20);
  1050.             ls->sizevstack = 20;
  1051.         } else {
  1052.             ls->vstack = realloc(ls->vstack,
  1053.                 ls->sizevstack * sizeof(VarInfo) * 2);
  1054.             ls->sizevstack = ls->sizevstack * 2;
  1055.         }
  1056.     }
  1057.     kp_assert((uintptr_t)name < VARNAME__MAX ||
  1058.             kp_tab_getstr(fs->kt, name) != NULL);
  1059.     ls->vstack[vtop].name = name;
  1060.     fs->varmap[fs->nactvar+n] = (uint16_t)vtop;
  1061.     ls->vtop = vtop+1;
  1062. }

  1063. #define var_new_lit(ls, n, v) \
  1064.     var_new(ls, (n), kp_parse_keepstr(ls, "" v, sizeof(v)-1))

  1065. #define var_new_fixed(ls, n, vn) \
  1066.     var_new(ls, (n), (ktap_str_t *)(uintptr_t)(vn))
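
var_new_fixed() stores the small VARNAME_* values directly in the ktap_str_t * name field instead of a real string pointer; var_new() asserts this with (uintptr_t)name < VARNAME__MAX, and fs_prep_var() below uses the same test to write a one-byte code instead of a name. A standalone sketch of that sentinel-in-a-pointer idea, with made-up names, illustrative only:

/* Standalone sketch (not ktap code): small enum sentinels stored in a
 * pointer-typed name field. */
#include <stdint.h>
#include <stdio.h>

enum { EX_NAME_END, EX_NAME_FOR_IDX, EX_NAME_FOR_STOP, EX_NAME__MAX };

static const char *ex_decode(const char *name)
{
	static const char *const fixed[] = {
		"(end)", "(for index)", "(for limit)"
	};

	if ((uintptr_t)name < EX_NAME__MAX)	/* sentinel, not a pointer */
		return fixed[(uintptr_t)name];
	return name;				/* ordinary string */
}

int main(void)
{
	printf("%s\n", ex_decode((const char *)(uintptr_t)EX_NAME_FOR_IDX));
	printf("%s\n", ex_decode("x"));
	return 0;	/* prints "(for index)", then "x" */
}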

  1067. /* Add local variables. */
  1068. static void var_add(LexState *ls, BCReg nvars)
  1069. {
  1070.     FuncState *fs = ls->fs;
  1071.     BCReg nactvar = fs->nactvar;

  1072.     while (nvars--) {
  1073.         VarInfo *v = &var_get(ls, fs, nactvar);
  1074.         v->startpc = fs->pc;
  1075.         v->slot = nactvar++;
  1076.         v->info = 0;
  1077.     }
  1078.     fs->nactvar = nactvar;
  1079. }

  1080. /* Remove local variables. */
  1081. static void var_remove(LexState *ls, BCReg tolevel)
  1082. {
  1083.     FuncState *fs = ls->fs;
  1084.     while (fs->nactvar > tolevel)
  1085.         var_get(ls, fs, --fs->nactvar).endpc = fs->pc;
  1086. }

  1087. /* Lookup local variable name. */
  1088. static BCReg var_lookup_local(FuncState *fs, ktap_str_t *n)
  1089. {
  1090.     int i;

  1091.     for (i = fs->nactvar-1; i >= 0; i--) {
  1092.         if (n == var_get(fs->ls, fs, i).name)
  1093.             return (BCReg)i;
  1094.     }
  1095.     return (BCReg)-1; /* Not found. */
  1096. }

  1097. /* Lookup or add upvalue index. */
  1098. static int var_lookup_uv(FuncState *fs, int vidx, ExpDesc *e)
  1099. {
  1100.     int i, n = fs->nuv;

  1101.     for (i = 0; i < n; i++)
  1102.         if (fs->uvmap[i] == vidx)
  1103.             return i;  /* Already exists. */

  1104.     /* Otherwise create a new one. */
  1105.     checklimit(fs, fs->nuv, KP_MAX_UPVAL, "upvalues");
  1106.     kp_assert(e->k == VLOCAL || e->k == VUPVAL);
  1107.     fs->uvmap[n] = (uint16_t)vidx;
  1108.     fs->uvtmp[n] = (uint16_t)(e->k == VLOCAL ? vidx :
  1109.             KP_MAX_VSTACK+e->u.s.info);
  1110.     fs->nuv = n+1;
  1111.     return n;
  1112. }

  1113. /* Forward declaration. */
  1114. static void fscope_uvmark(FuncState *fs, BCReg level);

  1115. /* Recursively lookup variables in enclosing functions. */
  1116. static int var_lookup_(FuncState *fs, ktap_str_t *name, ExpDesc *e,
  1117.              int first)
  1118. {
  1119.     if (fs) {
  1120.         BCReg reg = var_lookup_local(fs, name);
  1121.         if ((int32_t)reg >= 0) {  /* Local in this function? */
  1122.             expr_init(e, VLOCAL, reg);
  1123.             if (!first) {
  1124.                 /* Scope now has an upvalue. */
  1125.                 fscope_uvmark(fs, reg);
  1126.             }
  1127.             return (int)(e->u.s.aux = (uint32_t)fs->varmap[reg]);
  1128.         } else {
  1129.             /* Var in outer func? */
  1130.             int vidx = var_lookup_(fs->prev, name, e, 0);
  1131.             if ((int32_t)vidx >= 0) {
  1132.                 /* Yes, make it an upvalue here. */
  1133.                 e->u.s.info =
  1134.                     (uint8_t)var_lookup_uv(fs, vidx, e);
  1135.                 e->k = VUPVAL;
  1136.                 return vidx;
  1137.             }
  1138.         }
  1139.     } else { /* Not found in any function, must be a global. */
  1140.         expr_init(e, VGLOBAL, 0);
  1141.         e->u.sval = name;
  1142.     }
  1143.     return (int)-1; /* Global. */
  1144. }

  1145. /* Lookup variable name. */
  1146. #define var_lookup(ls, e) \
  1147.     var_lookup_((ls)->fs, lex_str(ls), (e), 1)

  1148. /* -- Goto and label handling --------------------------------------------- */

  1149. /* Add a new goto or label. */
  1150. static int gola_new(LexState *ls, ktap_str_t *name, uint8_t info, BCPos pc)
  1151. {
  1152.     FuncState *fs = ls->fs;
  1153.     int vtop = ls->vtop;

  1154.     if (vtop >= ls->sizevstack) {
  1155.         if (ls->sizevstack >= KP_MAX_VSTACK)
  1156.             kp_lex_error(ls, 0, KP_ERR_XLIMC, KP_MAX_VSTACK);
  1157.         if (!ls->vstack) {
  1158.             ls->vstack = malloc(sizeof(VarInfo) * 20);
  1159.             ls->sizevstack = 20;
  1160.         } else {
  1161.             ls->vstack = realloc(ls->vstack,
  1162.                     ls->sizevstack * sizeof(VarInfo) * 2);
  1163.             ls->sizevstack = ls->sizevstack * 2;
  1164.         }
  1165.     }
  1166.     kp_assert(name == NAME_BREAK ||
  1167.           kp_tab_getstr(fs->kt, name) != NULL);
  1168.     ls->vstack[vtop].name = name;
  1169.     ls->vstack[vtop].startpc = pc;
  1170.     ls->vstack[vtop].slot = (uint8_t)fs->nactvar;
  1171.     ls->vstack[vtop].info = info;
  1172.     ls->vtop = vtop+1;
  1173.     return vtop;
  1174. }

  1175. #define gola_isgoto(v)        ((v)->info & VSTACK_GOTO)
  1176. #define gola_islabel(v)        ((v)->info & VSTACK_LABEL)
  1177. #define gola_isgotolabel(v)    ((v)->info & (VSTACK_GOTO|VSTACK_LABEL))

  1178. /* Patch goto to jump to label. */
  1179. static void gola_patch(LexState *ls, VarInfo *vg, VarInfo *vl)
  1180. {
  1181.     FuncState *fs = ls->fs;
  1182.     BCPos pc = vg->startpc;

  1183.     vg->name = NULL; /* Invalidate pending goto. */
  1184.     setbc_a(&fs->bcbase[pc].ins, vl->slot);
  1185.     jmp_patch(fs, pc, vl->startpc);
  1186. }

  1187. /* Patch goto to close upvalues. */
  1188. static void gola_close(LexState *ls, VarInfo *vg)
  1189. {
  1190.     FuncState *fs = ls->fs;
  1191.     BCPos pc = vg->startpc;
  1192.     BCIns *ip = &fs->bcbase[pc].ins;
  1193.     kp_assert(gola_isgoto(vg));
  1194.     kp_assert(bc_op(*ip) == BC_JMP || bc_op(*ip) == BC_UCLO);
  1195.     setbc_a(ip, vg->slot);
  1196.     if (bc_op(*ip) == BC_JMP) {
  1197.         BCPos next = jmp_next(fs, pc);
  1198.         if (next != NO_JMP)
  1199.             jmp_patch(fs, next, pc);  /* Jump to UCLO. */
  1200.         setbc_op(ip, BC_UCLO);  /* Turn into UCLO. */
  1201.         setbc_j(ip, NO_JMP);
  1202.     }
  1203. }

  1204. /* Resolve pending forward gotos for label. */
  1205. static void gola_resolve(LexState *ls, FuncScope *bl, int idx)
  1206. {
  1207.     VarInfo *vg = ls->vstack + bl->vstart;
  1208.     VarInfo *vl = ls->vstack + idx;
  1209.     for (; vg < vl; vg++)
  1210.         if (vg->name == vl->name && gola_isgoto(vg)) {
  1211.             if (vg->slot < vl->slot) {
  1212.                 ktap_str_t *name =
  1213.                     var_get(ls, ls->fs, vg->slot).name;
  1214.                 kp_assert((uintptr_t)name >= VARNAME__MAX);
  1215.                 ls->linenumber =
  1216.                     ls->fs->bcbase[vg->startpc].line;
  1217.                 kp_assert(vg->name != NAME_BREAK);
  1218.                 kp_lex_error(ls, 0, KP_ERR_XGSCOPE,
  1219.                 getstr(vg->name), getstr(name));
  1220.             }
  1221.             gola_patch(ls, vg, vl);
  1222.         }
  1223. }

  1224. /* Fixup remaining gotos and labels for scope. */
  1225. static void gola_fixup(LexState *ls, FuncScope *bl)
  1226. {
  1227.     VarInfo *v = ls->vstack + bl->vstart;
  1228.     VarInfo *ve = ls->vstack + ls->vtop;

  1229.     for (; v < ve; v++) {
  1230.         ktap_str_t *name = v->name;
  1231.         /* Only consider remaining valid gotos/labels. */
  1232.         if (name != NULL) {
  1233.             if (gola_islabel(v)) {
  1234.                 VarInfo *vg;
  1235.                 /* Invalidate label that goes out of scope. */
  1236.                 v->name = NULL;
  1237.                 /* Resolve pending backward gotos. */
  1238.                 for (vg = v+1; vg < ve; vg++)
  1239.                     if (vg->name == name &&
  1240.                         gola_isgoto(vg)) {
  1241.                         if ((bl->flags&FSCOPE_UPVAL) &&
  1242.                              vg->slot > v->slot)
  1243.                             gola_close(ls, vg);
  1244.                         gola_patch(ls, vg, v);
  1245.                     }
  1246.             } else if (gola_isgoto(v)) {
  1247.                 /* Propagate goto or break to outer scope. */
  1248.                 if (bl->prev) {
  1249.                     bl->prev->flags |= name == NAME_BREAK ? FSCOPE_BREAK : FSCOPE_GOLA;
  1250.                     v->slot = bl->nactvar;
  1251.                     if ((bl->flags & FSCOPE_UPVAL))
  1252.                         gola_close(ls, v);
  1253.                 } else {
  1254.                     ls->linenumber =
  1255.                     ls->fs->bcbase[v->startpc].line;
  1256.                     if (name == NAME_BREAK)
  1257.                         kp_lex_error(ls, 0, KP_ERR_XBREAK);
  1258.                     else
  1259.                         kp_lex_error(ls, 0, KP_ERR_XLUNDEF, getstr(name));
  1260.                 }
  1261.             }
  1262.         }
  1263.     }
  1264. }

  1265. /* Find existing label. */
  1266. static VarInfo *gola_findlabel(LexState *ls, ktap_str_t *name)
  1267. {
  1268.     VarInfo *v = ls->vstack + ls->fs->bl->vstart;
  1269.     VarInfo *ve = ls->vstack + ls->vtop;

  1270.     for (; v < ve; v++)
  1271.         if (v->name == name && gola_islabel(v))
  1272.             return v;
  1273.     return NULL;
  1274. }

  1275. /* -- Scope handling ------------------------------------------------------ */

  1276. /* Begin a scope. */
  1277. static void fscope_begin(FuncState *fs, FuncScope *bl, int flags)
  1278. {
  1279.     bl->nactvar = (uint8_t)fs->nactvar;
  1280.     bl->flags = flags;
  1281.     bl->vstart = fs->ls->vtop;
  1282.     bl->prev = fs->bl;
  1283.     fs->bl = bl;
  1284.     kp_assert(fs->freereg == fs->nactvar);
  1285. }

  1286. /* End a scope. */
  1287. static void fscope_end(FuncState *fs)
  1288. {
  1289.     FuncScope *bl = fs->bl;
  1290.     LexState *ls = fs->ls;

  1291.     fs->bl = bl->prev;
  1292.     var_remove(ls, bl->nactvar);
  1293.     fs->freereg = fs->nactvar;
  1294.     kp_assert(bl->nactvar == fs->nactvar);
  1295.     if ((bl->flags & (FSCOPE_UPVAL|FSCOPE_NOCLOSE)) == FSCOPE_UPVAL)
  1296.         bcemit_AJ(fs, BC_UCLO, bl->nactvar, 0);
  1297.     if ((bl->flags & FSCOPE_BREAK)) {
  1298.         if ((bl->flags & FSCOPE_LOOP)) {
  1299.             int idx = gola_new(ls, NAME_BREAK, VSTACK_LABEL,
  1300.                         fs->pc);
  1301.             ls->vtop = idx;  /* Drop break label immediately. */
  1302.             gola_resolve(ls, bl, idx);
  1303.             return;
  1304.         }  /* else: need the fixup step to propagate the breaks. */
  1305.     } else if (!(bl->flags & FSCOPE_GOLA)) {
  1306.         return;
  1307.     }
  1308.     gola_fixup(ls, bl);
  1309. }

  1310. /* Mark scope as having an upvalue. */
  1311. static void fscope_uvmark(FuncState *fs, BCReg level)
  1312. {
  1313.     FuncScope *bl;

  1314.     for (bl = fs->bl; bl && bl->nactvar > level; bl = bl->prev);
  1315.     if (bl)
  1316.         bl->flags |= FSCOPE_UPVAL;
  1317. }

  1318. /* -- Function state management ------------------------------------------- */

  1319. /* Fixup bytecode for prototype. */
  1320. static void fs_fixup_bc(FuncState *fs, ktap_proto_t *pt, BCIns *bc, int n)
  1321. {
  1322.     BCInsLine *base = fs->bcbase;
  1323.     int i;

  1324.     pt->sizebc = n;
  1325.     bc[0] = BCINS_AD((fs->flags & PROTO_VARARG) ? BC_FUNCV : BC_FUNCF,
  1326.              fs->framesize, 0);
  1327.     for (i = 1; i < n; i++)
  1328.         bc[i] = base[i].ins;
  1329. }

  1330. /* Fixup upvalues for child prototype, step #2. */
  1331. static void fs_fixup_uv2(FuncState *fs, ktap_proto_t *pt)
  1332. {
  1333.     VarInfo *vstack = fs->ls->vstack;
  1334.     uint16_t *uv = pt->uv;
  1335.     int i, n = pt->sizeuv;

  1336.     for (i = 0; i < n; i++) {
  1337.         VarIndex vidx = uv[i];
  1338.         if (vidx >= KP_MAX_VSTACK)
  1339.             uv[i] = vidx - KP_MAX_VSTACK;
  1340.         else if ((vstack[vidx].info & VSTACK_VAR_RW))
  1341.             uv[i] = vstack[vidx].slot | PROTO_UV_LOCAL;
  1342.         else
  1343.             uv[i] = vstack[vidx].slot | PROTO_UV_LOCAL |
  1344.                     PROTO_UV_IMMUTABLE;
  1345.     }
  1346. }

  1347. /* Fixup constants for prototype. */
  1348. static void fs_fixup_k(FuncState *fs, ktap_proto_t *pt, void *kptr)
  1349. {
  1350.     ktap_tab_t *kt;
  1351.     ktap_node_t *node;
  1352.     int i, hmask;

  1353.     checklimitgt(fs, fs->nkn, BCMAX_D+1, "constants");
  1354.     checklimitgt(fs, fs->nkgc, BCMAX_D+1, "constants");

  1355.     pt->k = kptr;
  1356.     pt->sizekn = fs->nkn;
  1357.     pt->sizekgc = fs->nkgc;
  1358.     kt = fs->kt;
  1359.     node = kt->node;
  1360.     hmask = kt->hmask;
  1361.     for (i = 0; i <= hmask; i++) {
  1362.         ktap_node_t *n = &node[i];

  1363.         if (tvhaskslot(&n->val)) {
  1364.             ptrdiff_t kidx = (ptrdiff_t)tvkslot(&n->val);
  1365.             kp_assert(!is_number(&n->key));
  1366.             if (is_number(&n->key)) {
  1367.                 ktap_val_t *tv = &((ktap_val_t *)kptr)[kidx];
  1368.                 *tv = n->key;
  1369.             } else {
  1370.                 ktap_obj_t *o = n->key.val.gc;
  1371.                 ktap_obj_t **v = (ktap_obj_t **)kptr;
  1372.                 v[~kidx] = o;
  1373.                 if (is_proto(&n->key))
  1374.                     fs_fixup_uv2(fs, (ktap_proto_t *)o);
  1375.             }
  1376.         }
  1377.     }
  1378. }
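
The constant area filled in above is bidirectional: number constants occupy non-negative slots growing upward from kptr, while GC constants (strings, child prototypes, template tables) are stored at v[~kidx], growing downward from the same base. A minimal standalone sketch of that ~idx addressing (names and layout below are illustrative, not ktap's):

    #include <stdio.h>

    /* GC constants are addressed as base[~idx]: idx 0 -> base[-1],
     * idx 1 -> base[-2], growing downward from the shared base pointer. */
    int main(void)
    {
        const char *pool[4];
        const char **base = pool + 4;   /* one past the GC-constant area */

        base[~0] = "first gc const";    /* slot 0 lands at pool[3] */
        base[~1] = "second gc const";   /* slot 1 lands at pool[2] */

        printf("%s / %s\n", base[~0], base[~1]);
        return 0;
    }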

  1379. /* Fixup upvalues for prototype, step #1. */
  1380. static void fs_fixup_uv1(FuncState *fs, ktap_proto_t *pt, uint16_t *uv)
  1381. {
  1382.     pt->uv = uv;
  1383.     pt->sizeuv = fs->nuv;
  1384.     memcpy(uv, fs->uvtmp, fs->nuv*sizeof(VarIndex));
  1385. }

  1386. #ifndef KTAP_DISABLE_LINEINFO
  1387. /* Prepare lineinfo for prototype. */
  1388. static size_t fs_prep_line(FuncState *fs, BCLine numline)
  1389. {
  1390.     return (fs->pc-1) << (numline < 256 ? 0 : numline < 65536 ? 1 : 2);
  1391. }

  1392. /* Fixup lineinfo for prototype. */
  1393. static void fs_fixup_line(FuncState *fs, ktap_proto_t *pt,
  1394.               void *lineinfo, BCLine numline)
  1395. {
  1396.     BCInsLine *base = fs->bcbase + 1;
  1397.     BCLine first = fs->linedefined;
  1398.     int i = 0, n = fs->pc-1;

  1399.     pt->firstline = fs->linedefined;
  1400.     pt->numline = numline;
  1401.     pt->lineinfo = lineinfo;
  1402.     if (numline < 256) {
  1403.         uint8_t *li = (uint8_t *)lineinfo;
  1404.         do {
  1405.             BCLine delta = base[i].line - first;
  1406.             kp_assert(delta >= 0 && delta < 256);
  1407.             li[i] = (uint8_t)delta;
  1408.         } while (++i < n);
  1409.     } else if (numline < 65536) {
  1410.         uint16_t *li = (uint16_t *)lineinfo;
  1411.         do {
  1412.             BCLine delta = base[i].line - first;
  1413.             kp_assert(delta >= 0 && delta < 65536);
  1414.             li[i] = (uint16_t)delta;
  1415.         } while (++i < n);
  1416.     } else {
  1417.         uint32_t *li = (uint32_t *)lineinfo;
  1418.         do {
  1419.             BCLine delta = base[i].line - first;
  1420.             kp_assert(delta >= 0);
  1421.             li[i] = (uint32_t)delta;
  1422.         } while (++i < n);
  1423.     }
  1424. }
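
fs_prep_line() sizes the per-instruction line table from the function's line span, and fs_fixup_line() then stores each instruction's offset from firstline in 1, 2 or 4 bytes accordingly. A small sketch of the width selection, assuming the same <256 / <65536 thresholds:

    #include <stdint.h>
    #include <stdio.h>

    /* Pick the per-instruction line-delta width the same way
     * fs_prep_line() does: 1, 2 or 4 bytes depending on the span. */
    static int delta_width(uint32_t numline)
    {
        return numline < 256 ? 1 : numline < 65536 ? 2 : 4;
    }

    int main(void)
    {
        uint32_t spans[] = { 10, 300, 70000 };
        int i;

        for (i = 0; i < 3; i++)
            printf("numline=%u -> %d byte(s) per line-info entry\n",
                   (unsigned)spans[i], delta_width(spans[i]));
        return 0;
    }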

  1425. /* Prepare variable info for prototype. */
  1426. static size_t fs_prep_var(LexState *ls, FuncState *fs, size_t *ofsvar)
  1427. {
  1428.     VarInfo *vs = ls->vstack, *ve;
  1429.     int i, n;
  1430.     BCPos lastpc;

  1431.     kp_buf_reset(&ls->sb);  /* Copy to temp. string buffer. */
  1432.     /* Store upvalue names. */
  1433.     for (i = 0, n = fs->nuv; i < n; i++) {
  1434.         ktap_str_t *s = vs[fs->uvmap[i]].name;
  1435.         int len = s->len+1;
  1436.         char *p = kp_buf_more(&ls->sb, len);
  1437.         p = kp_buf_wmem(p, getstr(s), len);
  1438.         setsbufP(&ls->sb, p);
  1439.     }

  1440.     *ofsvar = sbuflen(&ls->sb);
  1441.     lastpc = 0;
  1442.     /* Store local variable names and compressed ranges. */
  1443.     for (ve = vs + ls->vtop, vs += fs->vbase; vs < ve; vs++) {
  1444.         if (!gola_isgotolabel(vs)) {
  1445.             ktap_str_t *s = vs->name;
  1446.             BCPos startpc;
  1447.             char *p;
  1448.             if ((uintptr_t)s < VARNAME__MAX) {
  1449.                 p = kp_buf_more(&ls->sb, 1 + 2*5);
  1450.                 *p++ = (char)(uintptr_t)s;
  1451.             } else {
  1452.                 int len = s->len+1;
  1453.                 p = kp_buf_more(&ls->sb, len + 2*5);
  1454.                 p = kp_buf_wmem(p, getstr(s), len);
  1455.             }
  1456.             startpc = vs->startpc;
  1457.             p = strfmt_wuleb128(p, startpc-lastpc);
  1458.             p = strfmt_wuleb128(p, vs->endpc-startpc);
  1459.             setsbufP(&ls->sb, p);
  1460.             lastpc = startpc;
  1461.         }
  1462.     }

  1463.     kp_buf_putb(&ls->sb, '\0');  /* Terminator for varinfo. */
  1464.     return sbuflen(&ls->sb);
  1465. }
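
The compressed startpc/endpc ranges above are written with strfmt_wuleb128(). A hedged sketch of an equivalent unsigned LEB128 writer (a local stand-in, not ktap's helper): seven payload bits per byte, with the high bit set on every byte except the last:

    #include <stdint.h>
    #include <stdio.h>

    /* Write v as unsigned LEB128 into p, return the advanced pointer. */
    static char *write_uleb128(char *p, uint32_t v)
    {
        for (; v >= 0x80; v >>= 7)
            *p++ = (char)((v & 0x7f) | 0x80);
        *p++ = (char)v;
        return p;
    }

    int main(void)
    {
        char buf[8], *q, *end = write_uleb128(buf, 300);  /* 0xac 0x02 */

        for (q = buf; q < end; q++)
            printf("%02x ", (unsigned char)*q);
        printf("\n");
        return 0;
    }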

  1466. /* Fixup variable info for prototype. */
  1467. static void fs_fixup_var(LexState *ls, ktap_proto_t *pt, uint8_t *p,
  1468.              size_t ofsvar)
  1469. {
  1470.     pt->uvinfo = p;
  1471.     pt->varinfo = (char *)p + ofsvar;
  1472.     /* Copy from temp. buffer. */
  1473.     memcpy(p, sbufB(&ls->sb), sbuflen(&ls->sb));
  1474. }
  1475. #else

  1476. /* Initialize with empty debug info, if disabled. */
  1477. #define fs_prep_line(fs, numline)        (UNUSED(numline), 0)
  1478. #define fs_fixup_line(fs, pt, li, numline) \
  1479.   pt->firstline = pt->numline = 0, (pt)->lineinfo = NULL
  1480. #define fs_prep_var(ls, fs, ofsvar)        (UNUSED(ofsvar), 0)
  1481. #define fs_fixup_var(ls, pt, p, ofsvar) \
  1482.   (pt)->uvinfo = NULL, (pt)->varinfo = NULL

  1483. #endif

  1484. /* Check if bytecode op returns. */
  1485. static int bcopisret(BCOp op)
  1486. {
  1487.     switch (op) {
  1488.     case BC_CALLMT: case BC_CALLT:
  1489.     case BC_RETM: case BC_RET: case BC_RET0: case BC_RET1:
  1490.         return 1;
  1491.     default:
  1492.         return 0;
  1493.     }
  1494. }

  1495. /* Fixup return instruction for prototype. */
  1496. static void fs_fixup_ret(FuncState *fs)
  1497. {
  1498.     BCPos lastpc = fs->pc;

  1499.     if (lastpc <= fs->lasttarget ||
  1500.         !bcopisret(bc_op(fs->bcbase[lastpc-1].ins))) {
  1501.         if ((fs->bl->flags & FSCOPE_UPVAL))
  1502.             bcemit_AJ(fs, BC_UCLO, 0, 0);
  1503.         bcemit_AD(fs, BC_RET0, 0, 1);  /* Need final return. */
  1504.     }
  1505.     fs->bl->flags |= FSCOPE_NOCLOSE;  /* Handled above. */
  1506.     fscope_end(fs);
  1507.     kp_assert(fs->bl == NULL);
  1508.     /* May need to fixup returns encoded before first function
  1509.      * was created. */
  1510.     if (fs->flags & PROTO_FIXUP_RETURN) {
  1511.         BCPos pc;
  1512.         for (pc = 1; pc < lastpc; pc++) {
  1513.             BCIns ins = fs->bcbase[pc].ins;
  1514.             BCPos offset;
  1515.             switch (bc_op(ins)) {
  1516.             case BC_CALLMT: case BC_CALLT:
  1517.             case BC_RETM: case BC_RET: case BC_RET0: case BC_RET1:
  1518.                 /* Copy original instruction. */
  1519.                 offset = bcemit_INS(fs, ins);
  1520.                 fs->bcbase[offset].line = fs->bcbase[pc].line;
  1521.                 offset = offset-(pc+1)+BCBIAS_J;
  1522.                 if (offset > BCMAX_D)
  1523.                     err_syntax(fs->ls, KP_ERR_XFIXUP);
  1524.                 /* Replace with UCLO plus branch. */
  1525.                 fs->bcbase[pc].ins = BCINS_AD(BC_UCLO, 0,
  1526.                                 offset);
  1527.                 break;
  1528.             case BC_UCLO:
  1529.                 return;  /* We're done. */
  1530.             default:
  1531.                 break;
  1532.             }
  1533.         }
  1534.     }
  1535. }

  1536. /* Finish a FuncState and return the new prototype. */
  1537. static ktap_proto_t *fs_finish(LexState *ls, BCLine line)
  1538. {
  1539.     FuncState *fs = ls->fs;
  1540.     BCLine numline = line - fs->linedefined;
  1541.     size_t sizept, ofsk, ofsuv, ofsli, ofsdbg, ofsvar;
  1542.     ktap_proto_t *pt;

  1543.     /* Apply final fixups. */
  1544.     fs_fixup_ret(fs);

  1545.     /* Calculate total size of prototype including all colocated arrays. */
  1546.     sizept = sizeof(ktap_proto_t) + fs->pc*sizeof(BCIns) +
  1547.             fs->nkgc*sizeof(ktap_obj_t *);
  1548.     sizept = (sizept + sizeof(ktap_val_t)-1) & ~(sizeof(ktap_val_t)-1);
  1549.     ofsk = sizept; sizept += fs->nkn*sizeof(ktap_val_t);
  1550.     ofsuv = sizept; sizept += ((fs->nuv+1)&~1)*2;
  1551.     ofsli = sizept; sizept += fs_prep_line(fs, numline);
  1552.     ofsdbg = sizept; sizept += fs_prep_var(ls, fs, &ofsvar);

  1553.     /* Allocate prototype and initialize its fields. */
  1554.     pt = (ktap_proto_t *)malloc((int)sizept);
  1555.     pt->gct = ~KTAP_TPROTO;
  1556.     pt->sizept = (int)sizept;
  1557.     pt->flags =
  1558.         (uint8_t)(fs->flags & ~(PROTO_HAS_RETURN|PROTO_FIXUP_RETURN));
  1559.     pt->numparams = fs->numparams;
  1560.     pt->framesize = fs->framesize;
  1561.     pt->chunkname = ls->chunkname;

  1562.     /* Close potentially uninitialized gap between bc and kgc. */
  1563.     *(uint32_t *)((char *)pt + ofsk - sizeof(ktap_obj_t *)*(fs->nkgc+1)) = 0;
  1564.     fs_fixup_bc(fs, pt, (BCIns *)((char *)pt + sizeof(ktap_proto_t)), fs->pc);
  1565.     fs_fixup_k(fs, pt, (void *)((char *)pt + ofsk));
  1566.     fs_fixup_uv1(fs, pt, (uint16_t *)((char *)pt + ofsuv));
  1567.     fs_fixup_line(fs, pt, (void *)((char *)pt + ofsli), numline);
  1568.     fs_fixup_var(ls, pt, (uint8_t *)((char *)pt + ofsdbg), ofsvar);

  1569.     ls->vtop = fs->vbase;  /* Reset variable stack. */
  1570.     ls->fs = fs->prev;
  1571.     kp_assert(ls->fs != NULL || ls->tok == TK_eof);
  1572.     return pt;
  1573. }
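
fs_finish() places the bytecode, GC constants, number constants, upvalue map and debug info in a single allocation; the (sizept + sizeof(ktap_val_t)-1) & ~(sizeof(ktap_val_t)-1) step is the standard power-of-two align-up idiom. A minimal sketch of that rounding:

    #include <stddef.h>
    #include <stdio.h>

    /* Round sz up to a multiple of a; a must be a power of two. */
    static size_t align_up(size_t sz, size_t a)
    {
        return (sz + a - 1) & ~(a - 1);
    }

    int main(void)
    {
        /* Prints: 16 16 24 */
        printf("%zu %zu %zu\n",
               align_up(13, 8), align_up(16, 8), align_up(17, 8));
        return 0;
    }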

  1574. /* Initialize a new FuncState. */
  1575. static void fs_init(LexState *ls, FuncState *fs)
  1576. {
  1577.     fs->prev = ls->fs; ls->fs = fs;  /* Append to list. */
  1578.     fs->ls = ls;
  1579.     fs->vbase = ls->vtop;
  1580.     fs->pc = 0;
  1581.     fs->lasttarget = 0;
  1582.     fs->jpc = NO_JMP;
  1583.     fs->freereg = 0;
  1584.     fs->nkgc = 0;
  1585.     fs->nkn = 0;
  1586.     fs->nactvar = 0;
  1587.     fs->nuv = 0;
  1588.     fs->bl = NULL;
  1589.     fs->flags = 0;
  1590.     fs->framesize = 1;  /* Minimum frame size. */
  1591.     fs->kt = kp_tab_new();
  1592. }

  1593. /* -- Expressions --------------------------------------------------------- */

  1594. /* Forward declaration. */
  1595. static void expr(LexState *ls, ExpDesc *v);

  1596. /* Return string expression. */
  1597. static void expr_str(LexState *ls, ExpDesc *e)
  1598. {
  1599.     expr_init(e, VKSTR, 0);
  1600.     e->u.sval = lex_str(ls);
  1601. }

  1602. #define checku8(x)     ((x) == (int32_t)(uint8_t)(x))

  1603. /* Return index expression. */
  1604. static void expr_index(FuncState *fs, ExpDesc *t, ExpDesc *e)
  1605. {
  1606.     /* Already called: expr_toval(fs, e). */
  1607.     t->k = VINDEXED;
  1608.     if (expr_isnumk(e)) {
  1609.         ktap_number n = expr_numberV(e);
  1610.         int32_t k = (int)n;
  1611.         if (checku8(k) && n == (ktap_number)k) {
  1612.             /* 256..511: const byte key */
  1613.             t->u.s.aux = BCMAX_C+1+(uint32_t)k;
  1614.             return;
  1615.         }
  1616.     } else if (expr_isstrk(e)) {
  1617.         BCReg idx = const_str(fs, e);
  1618.         if (idx <= BCMAX_C) {
  1619.             /* -256..-1: const string key */
  1620.             t->u.s.aux = ~idx;
  1621.             return;
  1622.         }
  1623.     }
  1624.     t->u.s.aux = expr_toanyreg(fs, e);  /* 0..255: register */
  1625. }
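
expr_index() folds three key encodings into t->u.s.aux, as its inline comments note: 0..BCMAX_C selects a register holding the key, values above BCMAX_C encode a small constant integer key, and a bit-inverted (negative) value names a string constant. A sketch of reading the encoding back, assuming BCMAX_C is 255 as the "256..511" comment suggests:

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_C 255   /* stands in for BCMAX_C; assumed value */

    /* Decode the three aux encodings produced by expr_index(). */
    static void describe_aux(int32_t aux)
    {
        if (aux < 0)
            printf("aux=%d -> string constant #%d\n", (int)aux, (int)~aux);
        else if (aux <= MAX_C)
            printf("aux=%d -> key held in register %d\n", (int)aux, (int)aux);
        else
            printf("aux=%d -> constant integer key %d\n", (int)aux,
                   (int)(aux - (MAX_C + 1)));
    }

    int main(void)
    {
        describe_aux(5);    /* register key */
        describe_aux(300);  /* constant byte key 44 */
        describe_aux(~7);   /* string constant #7 */
        return 0;
    }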

  1626. /* Parse index expression with named field. */
  1627. static void expr_field(LexState *ls, ExpDesc *v)
  1628. {
  1629.     FuncState *fs = ls->fs;
  1630.     ExpDesc key;

  1631.     expr_toanyreg(fs, v);
  1632.     kp_lex_next(ls);  /* Skip dot or colon. */
  1633.     expr_str(ls, &key);
  1634.     expr_index(fs, v, &key);
  1635. }

  1636. /* Parse index expression with brackets. */
  1637. static void expr_bracket(LexState *ls, ExpDesc *v)
  1638. {
  1639.     kp_lex_next(ls);  /* Skip '['. */
  1640.     expr(ls, v);
  1641.     expr_toval(ls->fs, v);
  1642.     lex_check(ls, ']');
  1643. }

  1644. /* Get value of constant expression. */
  1645. static void expr_kvalue(ktap_val_t *v, ExpDesc *e)
  1646. {
  1647.     if (e->k <= VKTRUE) {
  1648.         setitype(v, ~(uint32_t)e->k);
  1649.     } else if (e->k == VKSTR) {
  1650.         set_string(v, e->u.sval);
  1651.     } else {
  1652.         kp_assert(tvisnumber(expr_numtv(e)));
  1653.         *v = *expr_numtv(e);
  1654.     }
  1655. }

  1656. #define FLS(x)       ((uint32_t)(__builtin_clz(x)^31))
  1657. #define hsize2hbits(s) ((s) ? ((s)==1 ? 1 : 1+FLS((uint32_t)((s)-1))) : 0)


  1658. /* Parse table constructor expression. */
  1659. static void expr_table(LexState *ls, ExpDesc *e)
  1660. {
  1661.     FuncState *fs = ls->fs;
  1662.     BCLine line = ls->linenumber;
  1663.     ktap_tab_t *t = NULL;
  1664.     int vcall = 0, needarr = 0, fixt = 0;
  1665.     uint32_t narr = 1;  /* First array index. */
  1666.     uint32_t nhash = 0;  /* Number of hash entries. */
  1667.     BCReg freg = fs->freereg;
  1668.     BCPos pc = bcemit_AD(fs, BC_TNEW, freg, 0);

  1669.     expr_init(e, VNONRELOC, freg);
  1670.     bcreg_reserve(fs, 1);
  1671.     freg++;
  1672.     lex_check(ls, '{');
  1673.     while (ls->tok != '}') {
  1674.         ExpDesc key, val;
  1675.         vcall = 0;
  1676.         if (ls->tok == '[') {
  1677.             expr_bracket(ls, &key);/* Already calls expr_toval. */
  1678.             if (!expr_isk(&key))
  1679.                 expr_index(fs, e, &key);
  1680.             if (expr_isnumk(&key) && expr_numiszero(&key))
  1681.                 needarr = 1;
  1682.             else
  1683.                 nhash++;
  1684.             lex_check(ls, '=');
  1685.         } else if ((ls->tok == TK_name) &&
  1686.                 kp_lex_lookahead(ls) == '=') {
  1687.             expr_str(ls, &key);
  1688.             lex_check(ls, '=');
  1689.             nhash++;
  1690.         } else {
  1691.             expr_init(&key, VKNUM, 0);
  1692.             set_number(&key.u.nval, (int)narr);
  1693.             narr++;
  1694.             needarr = vcall = 1;
  1695.         }
  1696.         expr(ls, &val);
  1697.         if (expr_isk(&key) && key.k != VKNIL &&
  1698.             (key.k == VKSTR || expr_isk_nojump(&val))) {
  1699.             ktap_val_t k, *v;
  1700.             if (!t) {  /* Create template table on demand. */
  1701.                 BCReg kidx;
  1702.                 t = kp_tab_new();
  1703.                 kidx = const_gc(fs, obj2gco(t), KTAP_TTAB);
  1704.                 fs->bcbase[pc].ins = BCINS_AD(BC_TDUP, freg-1,
  1705.                                  kidx);
  1706.             }
  1707.             vcall = 0;
  1708.             expr_kvalue(&k, &key);
  1709.             v = kp_tab_set(t, &k);
  1710.             /* Add const key/value to template table. */
  1711.             if (expr_isk_nojump(&val)) {
  1712.                 expr_kvalue(v, &val);
  1713.             } else {
  1714.                 /* Otherwise create dummy string key (avoids kp_tab_newkey). */
  1715.                 set_table(v, t);  /* Preserve key with table itself as value. */
  1716.                 fixt = 1;/* Fix this later, after all resizes. */
  1717.                 goto nonconst;
  1718.             }
  1719.         } else {
  1720. nonconst:
  1721.             if (val.k != VCALL) {
  1722.                 expr_toanyreg(fs, &val);
  1723.                 vcall = 0;
  1724.             }
  1725.             if (expr_isk(&key))
  1726.                 expr_index(fs, e, &key);
  1727.             bcemit_store(fs, e, &val);
  1728.         }
  1729.         fs->freereg = freg;
  1730.         if (!lex_opt(ls, ',') && !lex_opt(ls, ';'))
  1731.             break;
  1732.     }
  1733.     lex_match(ls, '}', '{', line);
  1734.     if (vcall) {
  1735.         BCInsLine *ilp = &fs->bcbase[fs->pc-1];
  1736.         ExpDesc en;
  1737.         kp_assert(bc_a(ilp->ins) == freg &&
  1738.             bc_op(ilp->ins) == (narr > 256 ? BC_TSETV : BC_TSETB));
  1739.         expr_init(&en, VKNUM, 0);
  1740.         set_number(&en.u.nval, narr - 1);
  1741.         if (narr > 256) { fs->pc--; ilp--; }
  1742.         ilp->ins = BCINS_AD(BC_TSETM, freg, const_num(fs, &en));
  1743.         setbc_b(&ilp[-1].ins, 0);
  1744.     }
  1745.     if (pc == fs->pc-1) {  /* Make expr relocable if possible. */
  1746.         e->u.s.info = pc;
  1747.         fs->freereg--;
  1748.         e->k = VRELOCABLE;
  1749.     } else {
  1750.         e->k = VNONRELOC;  /* May have been changed by expr_index. */
  1751.     }
  1752.     if (!t) {  /* Construct TNEW RD: hhhhhaaaaaaaaaaa. */
  1753.         BCIns *ip = &fs->bcbase[pc].ins;
  1754.         if (!needarr) narr = 0;
  1755.         else if (narr < 3) narr = 3;
  1756.         else if (narr > 0x7ff) narr = 0x7ff;
  1757.         setbc_d(ip, narr|(hsize2hbits(nhash)<<11));
  1758.     } else {
  1759.         if (fixt) {  /* Fix value for dummy keys in template table. */
  1760.             ktap_node_t *node = t->node;
  1761.             uint32_t i, hmask = t->hmask;
  1762.             for (i = 0; i <= hmask; i++) {
  1763.                 ktap_node_t *n = &node[i];
  1764.                 if (is_table(&n->val)) {
  1765.                     kp_assert(tabV(&n->val) == t);
  1766.                     /* Turn value into nil. */
  1767.                     set_nil(&n->val);
  1768.                 }
  1769.             }
  1770.         }
  1771.     }
  1772. }
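
When no template table was created, expr_table() patches the TNEW operand as the hhhhhaaaaaaaaaaa comment describes: the array-size hint sits in the low 11 bits and hsize2hbits(nhash), roughly ceil(log2) of the hash count, is shifted above it. A standalone sketch mirroring the FLS/hsize2hbits macros above:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t fls32(uint32_t x)   /* index of the highest set bit */
    {
        return (uint32_t)(__builtin_clz(x) ^ 31);
    }

    static uint32_t hbits(uint32_t s)   /* mirrors hsize2hbits() above */
    {
        return s ? (s == 1 ? 1 : 1 + fls32(s - 1)) : 0;
    }

    int main(void)
    {
        uint32_t narr = 3, nhash = 5;   /* example size hints */
        uint32_t d = narr | (hbits(nhash) << 11);

        printf("narr=%u nhash=%u -> D=0x%04x (hash bits %u)\n",
               (unsigned)narr, (unsigned)nhash, (unsigned)d,
               (unsigned)(d >> 11));
        return 0;
    }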

  1773. /* Parse function parameters. */
  1774. static BCReg parse_params(LexState *ls, int needself)
  1775. {
  1776.     FuncState *fs = ls->fs;
  1777.     BCReg nparams = 0;
  1778.     lex_check(ls, '(');
  1779.     if (needself)
  1780.         var_new_lit(ls, nparams++, "self");
  1781.     if (ls->tok != ')') {
  1782.         do {
  1783.             if (ls->tok == TK_name) {
  1784.                 var_new(ls, nparams++, lex_str(ls));
  1785.             } else if (ls->tok == TK_dots) {
  1786.                 kp_lex_next(ls);
  1787.                 fs->flags |= PROTO_VARARG;
  1788.                 break;
  1789.             } else {
  1790.                 err_syntax(ls, KP_ERR_XPARAM);
  1791.             }
  1792.         } while (lex_opt(ls, ','));
  1793.     }
  1794.     var_add(ls, nparams);
  1795.     kp_assert(fs->nactvar == nparams);
  1796.     bcreg_reserve(fs, nparams);
  1797.     lex_check(ls, ')');
  1798.     return nparams;
  1799. }

  1800. /* Forward declaration. */
  1801. static void parse_chunk(LexState *ls);

  1802. /* Parse body of a function. */
  1803. static void parse_body(LexState *ls, ExpDesc *e, int needself, BCLine line)
  1804. {
  1805.     FuncState fs, *pfs = ls->fs;
  1806.     FuncScope bl;
  1807.     ktap_proto_t *pt;
  1808.     ptrdiff_t oldbase = pfs->bcbase - ls->bcstack;

  1809.     fs_init(ls, &fs);
  1810.     fscope_begin(&fs, &bl, 0);
  1811.     fs.linedefined = line;
  1812.     fs.numparams = (uint8_t)parse_params(ls, needself);
  1813.     fs.bcbase = pfs->bcbase + pfs->pc;
  1814.     fs.bclim = pfs->bclim - pfs->pc;
  1815.     bcemit_AD(&fs, BC_FUNCF, 0, 0);  /* Placeholder. */
  1816.     lex_check(ls, '{');
  1817.     parse_chunk(ls);
  1818.     lex_check(ls, '}');
  1819.     pt = fs_finish(ls, (ls->lastline = ls->linenumber));
  1820.     pfs->bcbase = ls->bcstack + oldbase;  /* May have been reallocated. */
  1821.     pfs->bclim = (BCPos)(ls->sizebcstack - oldbase);
  1822.     /* Store new prototype in the constant array of the parent. */
  1823.     expr_init(e, VRELOCABLE,
  1824.         bcemit_AD(pfs, BC_FNEW, 0,
  1825.               const_gc(pfs, (ktap_obj_t *)pt, KTAP_TPROTO)));
  1826.     if (!(pfs->flags & PROTO_CHILD)) {
  1827.         if (pfs->flags & PROTO_HAS_RETURN)
  1828.             pfs->flags |= PROTO_FIXUP_RETURN;
  1829.         pfs->flags |= PROTO_CHILD;
  1830.     }
  1831.     //kp_lex_next(ls);
  1832. }

  1833. /* Parse body of a function, for 'trace/trace_end/profile/tick' closure */
  1834. static void parse_body_no_args(LexState *ls, ExpDesc *e, int needself,
  1835.                 BCLine line)
  1836. {
  1837.     FuncState fs, *pfs = ls->fs;
  1838.     FuncScope bl;
  1839.     ktap_proto_t *pt;
  1840.     ptrdiff_t oldbase = pfs->bcbase - ls->bcstack;

  1841.     fs_init(ls, &fs);
  1842.     fscope_begin(&fs, &bl, 0);
  1843.     fs.linedefined = line;
  1844.     fs.numparams = 0;
  1845.     fs.bcbase = pfs->bcbase + pfs->pc;
  1846.     fs.bclim = pfs->bclim - pfs->pc;
  1847.     bcemit_AD(&fs, BC_FUNCF, 0, 0);  /* Placeholder. */
  1848.     lex_check(ls, '{');
  1849.     parse_chunk(ls);
  1850.     lex_check(ls, '}');
  1851.     pt = fs_finish(ls, (ls->lastline = ls->linenumber));
  1852.     pfs->bcbase = ls->bcstack + oldbase;  /* May have been reallocated. */
  1853.     pfs->bclim = (BCPos)(ls->sizebcstack - oldbase);
  1854.     /* Store new prototype in the constant array of the parent. */
  1855.     expr_init(e, VRELOCABLE,
  1856.         bcemit_AD(pfs, BC_FNEW, 0,
  1857.               const_gc(pfs, (ktap_obj_t *)pt, KTAP_TPROTO)));
  1858.     if (!(pfs->flags & PROTO_CHILD)) {
  1859.         if (pfs->flags & PROTO_HAS_RETURN)
  1860.             pfs->flags |= PROTO_FIXUP_RETURN;
  1861.         pfs->flags |= PROTO_CHILD;
  1862.     }
  1863.     //kp_lex_next(ls);
  1864. }


  1865. /* Parse expression list. Last expression is left open. */
  1866. static BCReg expr_list(LexState *ls, ExpDesc *v)
  1867. {
  1868.     BCReg n = 1;

  1869.     expr(ls, v);
  1870.     while (lex_opt(ls, ',')) {
  1871.         expr_tonextreg(ls->fs, v);
  1872.         expr(ls, v);
  1873.         n++;
  1874.     }
  1875.     return n;
  1876. }

  1877. /* Parse function argument list. */
  1878. static void parse_args(LexState *ls, ExpDesc *e)
  1879. {
  1880.     FuncState *fs = ls->fs;
  1881.     ExpDesc args;
  1882.     BCIns ins;
  1883.     BCReg base;
  1884.     BCLine line = ls->linenumber;

  1885.     if (ls->tok == '(') {
  1886.         if (line != ls->lastline)
  1887.             err_syntax(ls, KP_ERR_XAMBIG);
  1888.         kp_lex_next(ls);
  1889.         if (ls->tok == ')') {  /* f(). */
  1890.             args.k = VVOID;
  1891.         } else {
  1892.             expr_list(ls, &args);
  1893.             /* f(a, b, g()) or f(a, b, ...). */
  1894.             if (args.k == VCALL) {
  1895.                 /* Pass on multiple results. */
  1896.                 setbc_b(bcptr(fs, &args), 0);
  1897.             }
  1898.         }
  1899.         lex_match(ls, ')', '(', line);
  1900.     } else if (ls->tok == '{') {
  1901.         expr_table(ls, &args);
  1902.     } else if (ls->tok == TK_string) {
  1903.         expr_init(&args, VKSTR, 0);
  1904.         args.u.sval = rawtsvalue(&ls->tokval);
  1905.         kp_lex_next(ls);
  1906.     } else {
  1907.         err_syntax(ls, KP_ERR_XFUNARG);
  1908.         return;  /* Silence compiler. */
  1909.     }

  1910.     kp_assert(e->k == VNONRELOC);
  1911.     base = e->u.s.info;  /* Base register for call. */
  1912.     if (args.k == VCALL) {
  1913.         ins = BCINS_ABC(BC_CALLM, base, 2, args.u.s.aux - base - 1);
  1914.     } else {
  1915.         if (args.k != VVOID)
  1916.             expr_tonextreg(fs, &args);
  1917.         ins = BCINS_ABC(BC_CALL, base, 2, fs->freereg - base);
  1918.     }
  1919.     expr_init(e, VCALL, bcemit_INS(fs, ins));
  1920.     e->u.s.aux = base;
  1921.     fs->bcbase[fs->pc - 1].line = line;
  1922.     fs->freereg = base+1;  /* Leave one result by default. */
  1923. }
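
Reading the emitted call above: the B operand holds the number of wanted results plus one (1 discards results, 2 keeps one, 0 passes all results through), and for a fixed-argument BC_CALL the C operand is fs->freereg - base, which works out to the argument count plus one. A small illustrative sketch of that convention (values are made up; the operand bit layout is not shown):

    #include <stdio.h>

    /* Describe the B operand of a call as used in this file. */
    static const char *describe_b(unsigned b)
    {
        return b == 0 ? "keep all results" :
               b == 1 ? "discard results"  : "keep B-1 results";
    }

    int main(void)
    {
        unsigned base = 4, freereg = 7;   /* callee in r4, args in r5..r6 */
        unsigned C = freereg - base;      /* 3 -> two arguments */
        unsigned b;

        printf("C=%u (arguments=%u)\n", C, C - 1);
        for (b = 0; b <= 2; b++)
            printf("B=%u: %s\n", b, describe_b(b));
        return 0;
    }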

  1924. /* Parse primary expression. */
  1925. static void expr_primary(LexState *ls, ExpDesc *v)
  1926. {
  1927.     FuncState *fs = ls->fs;

  1928.     /* Parse prefix expression. */
  1929.     if (ls->tok == '(') {
  1930.         BCLine line = ls->linenumber;
  1931.         kp_lex_next(ls);
  1932.         expr(ls, v);
  1933.         lex_match(ls, ')', '(', line);
  1934.         expr_discharge(ls->fs, v);
  1935.     } else if (ls->tok == TK_name) {
  1936.         var_lookup(ls, v);
  1937.     } else {
  1938.         err_syntax(ls, KP_ERR_XSYMBOL);
  1939.     }

  1940.     for (;;) {  /* Parse multiple expression suffixes. */
  1941.         if (ls->tok == '.') {
  1942.             expr_field(ls, v);
  1943.         } else if (ls->tok == '[') {
  1944.             ExpDesc key;
  1945.             expr_toanyreg(fs, v);
  1946.             expr_bracket(ls, &key);
  1947.             expr_index(fs, v, &key);
  1948.         } else if (ls->tok == ':') {
  1949.             ExpDesc key;
  1950.             kp_lex_next(ls);
  1951.             expr_str(ls, &key);
  1952.             bcemit_method(fs, v, &key);
  1953.             parse_args(ls, v);
  1954.         } else if (ls->tok == '(' || ls->tok == TK_string ||
  1955.                 ls->tok == '{') {
  1956.             expr_tonextreg(fs, v);
  1957.             parse_args(ls, v);
  1958.         } else {
  1959.             break;
  1960.         }
  1961.     }
  1962. }

  1963. /* Parse simple expression. */
  1964. static void expr_simple(LexState *ls, ExpDesc *v)
  1965. {
  1966.     switch (ls->tok) {
  1967.     case TK_number:
  1968.         expr_init(v, VKNUM, 0);
  1969.         set_obj(&v->u.nval, &ls->tokval);
  1970.         break;
  1971.     case TK_string:
  1972.         expr_init(v, VKSTR, 0);
  1973.         v->u.sval = rawtsvalue(&ls->tokval);
  1974.         break;
  1975.     case TK_nil:
  1976.         expr_init(v, VKNIL, 0);
  1977.         break;
  1978.     case TK_true:
  1979.         expr_init(v, VKTRUE, 0);
  1980.         break;
  1981.     case TK_false:
  1982.         expr_init(v, VKFALSE, 0);
  1983.         break;
  1984.     case TK_dots: {  /* Vararg. */
  1985.         FuncState *fs = ls->fs;
  1986.         BCReg base;
  1987.         checkcond(ls, fs->flags & PROTO_VARARG, KP_ERR_XDOTS);
  1988.         bcreg_reserve(fs, 1);
  1989.         base = fs->freereg-1;
  1990.         expr_init(v, VCALL, bcemit_ABC(fs, BC_VARG, base, 2,
  1991.         fs->numparams));
  1992.         v->u.s.aux = base;
  1993.         break;
  1994.     }
  1995.     case '{':  /* Table constructor. */
  1996.         expr_table(ls, v);
  1997.         return;
  1998.     case TK_function:
  1999.         kp_lex_next(ls);
  2000.         parse_body(ls, v, 0, ls->linenumber);
  2001.         return;
  2002.     case TK_argstr:
  2003.         expr_init(v, VARGSTR, 0);
  2004.         break;
  2005.     case TK_probename:
  2006.         expr_init(v, VARGNAME, 0);
  2007.         break;
  2008.     case TK_arg0: case TK_arg1: case TK_arg2: case TK_arg3: case TK_arg4:
  2009.     case TK_arg5: case TK_arg6: case TK_arg7: case TK_arg8: case TK_arg9:
  2010.         expr_init(v, VARGN, ls->tok - TK_arg0);
  2011.         break;
  2012.     case TK_pid:
  2013.         expr_init(v, VPID, 0);
  2014.         break;
  2015.     case TK_tid:
  2016.         expr_init(v, VTID, 0);
  2017.         break;
  2018.     case TK_uid:
  2019.         expr_init(v, VUID, 0);
  2020.         break;
  2021.     case TK_cpu:
  2022.         expr_init(v, VCPU, 0);
  2023.         break;
  2024.     case TK_execname:
  2025.         expr_init(v, VEXECNAME, 0);
  2026.         break;
  2027.     default:
  2028.         expr_primary(ls, v);
  2029.         return;
  2030.     }
  2031.     kp_lex_next(ls);
  2032. }

  2033. /* Manage syntactic levels to avoid blowing up the stack. */
  2034. static void synlevel_begin(LexState *ls)
  2035. {
  2036.     if (++ls->level >= KP_MAX_XLEVEL)
  2037.         kp_lex_error(ls, 0, KP_ERR_XLEVELS);
  2038. }

  2039. #define synlevel_end(ls)    ((ls)->level--)

  2040. /* Convert token to binary operator. */
  2041. static BinOpr token2binop(LexToken tok)
  2042. {
  2043.     switch (tok) {
  2044.     case '+':    return OPR_ADD;
  2045.     case '-':    return OPR_SUB;
  2046.     case '*':    return OPR_MUL;
  2047.     case '/':    return OPR_DIV;
  2048.     case '%':    return OPR_MOD;
  2049.     case '^':    return OPR_POW;
  2050.     case TK_concat: return OPR_CONCAT;
  2051.     case TK_ne:    return OPR_NE;
  2052.     case TK_eq:    return OPR_EQ;
  2053.     case '<':    return OPR_LT;
  2054.     case TK_le:    return OPR_LE;
  2055.     case '>':    return OPR_GT;
  2056.     case TK_ge:    return OPR_GE;
  2057.     case TK_and:    return OPR_AND;
  2058.     case TK_or:    return OPR_OR;
  2059.     default:    return OPR_NOBINOPR;
  2060.     }
  2061. }

  2062. /* Priorities for each binary operator. ORDER OPR. */
  2063. static const struct {
  2064.     uint8_t left;    /* Left priority. */
  2065.     uint8_t right;    /* Right priority. */
  2066. } priority[] = {
  2067.     {6,6}, {6,6}, {7,7}, {7,7}, {7,7},    /* ADD SUB MUL DIV MOD */
  2068.     {10,9}, {5,4},            /* POW CONCAT (right associative) */
  2069.     {3,3}, {3,3},                /* EQ NE */
  2070.     {3,3}, {3,3}, {3,3}, {3,3},        /* LT GE GT LE */
  2071.     {2,2}, {1,1}                /* AND OR */
  2072. };

  2073. #define UNARY_PRIORITY        8  /* Priority for unary operators. */

  2074. /* Forward declaration. */
  2075. static BinOpr expr_binop(LexState *ls, ExpDesc *v, uint32_t limit);

  2076. /* Parse unary expression. */
  2077. static void expr_unop(LexState *ls, ExpDesc *v)
  2078. {
  2079.     BCOp op;
  2080.     if (ls->tok == TK_not) {
  2081.         op = BC_NOT;
  2082.     } else if (ls->tok == '-') {
  2083.         op = BC_UNM;
  2084. #if 0 /* ktap don't support lua length operator '#' */
  2085.     } else if (ls->tok == '#') {
  2086.         op = BC_LEN;
  2087. #endif
  2088.     } else {
  2089.         expr_simple(ls, v);
  2090.         return;
  2091.     }
  2092.     kp_lex_next(ls);
  2093.     expr_binop(ls, v, UNARY_PRIORITY);
  2094.     bcemit_unop(ls->fs, op, v);
  2095. }

  2096. /* Parse binary expressions with priority higher than the limit. */
  2097. static BinOpr expr_binop(LexState *ls, ExpDesc *v, uint32_t limit)
  2098. {
  2099.     BinOpr op;

  2100.     synlevel_begin(ls);
  2101.     expr_unop(ls, v);
  2102.     op = token2binop(ls->tok);
  2103.     while (op != OPR_NOBINOPR && priority[op].left > limit) {
  2104.         ExpDesc v2;
  2105.         BinOpr nextop;
  2106.         kp_lex_next(ls);
  2107.         bcemit_binop_left(ls->fs, op, v);
  2108.         /* Parse binary expression with higher priority. */
  2109.         nextop = expr_binop(ls, &v2, priority[op].right);
  2110.         bcemit_binop(ls->fs, op, v, &v2);
  2111.         op = nextop;
  2112.     }
  2113.     synlevel_end(ls);
  2114.     return op;  /* Return unconsumed binary operator (if any). */
  2115. }
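
expr_binop() is a precedence-climbing parser: after reading the left operand it keeps consuming operators whose left priority exceeds the caller's limit, and parses each right operand with the operator's right priority, so operators whose right priority is one lower than the left (POW, CONCAT) come out right-associative. A self-contained toy sketch of the same scheme, limited to single digits and the operators + * ^ (not ktap code):

    #include <math.h>
    #include <stdio.h>

    /* Toy precedence climber in the style of expr_binop()/priority[]. */
    static const char *src;

    static double parse_operand(void)
    {
        return (double)(*src++ - '0');  /* single digit, no error handling */
    }

    static int prio(char op, int left)
    {
        switch (op) {
        case '+': return 6;
        case '*': return 7;
        case '^': return left ? 10 : 9; /* right-assoc: right prio is lower */
        default:  return 0;             /* not a binary operator */
        }
    }

    static double apply(char op, double a, double b)
    {
        return op == '+' ? a + b : op == '*' ? a * b : pow(a, b);
    }

    /* Parse operators with left priority above 'limit' (cf. expr_binop). */
    static double parse_binop(unsigned limit)
    {
        double v = parse_operand();

        while (*src && (unsigned)prio(*src, 1) > limit) {
            char op = *src++;
            /* Right operand is parsed with the operator's right priority. */
            double v2 = parse_binop((unsigned)prio(op, 0));
            v = apply(op, v, v2);
        }
        return v;
    }

    int main(void)
    {
        src = "2^3^2+1";                 /* right-assoc: 2^(3^2)+1 = 513 */
        printf("%g\n", parse_binop(0));  /* prints 513 */
        return 0;
    }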

  2116. /* Parse expression. */
  2117. static void expr(LexState *ls, ExpDesc *v)
  2118. {
  2119.     expr_binop(ls, v, 0);  /* Priority 0: parse whole expression. */
  2120. }

  2121. /* Assign expression to the next register. */
  2122. static void expr_next(LexState *ls)
  2123. {
  2124.     ExpDesc e;
  2125.     expr(ls, &e);
  2126.     expr_tonextreg(ls->fs, &e);
  2127. }

  2128. /* Parse conditional expression. */
  2129. static BCPos expr_cond(LexState *ls)
  2130. {
  2131.     ExpDesc v;

  2132.     lex_check(ls, '(');
  2133.     expr(ls, &v);
  2134.     if (v.k == VKNIL)
  2135.         v.k = VKFALSE;
  2136.     bcemit_branch_t(ls->fs, &v);
  2137.     lex_check(ls, ')');
  2138.     return v.f;
  2139. }

  2140. /* -- Assignments --------------------------------------------------------- */

  2141. /* List of LHS variables. */
  2142. typedef struct LHSVarList {
  2143.     ExpDesc v;            /* LHS variable. */
  2144.     struct LHSVarList *prev;    /* Link to previous LHS variable. */
  2145. } LHSVarList;

  2146. /* Eliminate write-after-read hazards for local variable assignment. */
  2147. static void assign_hazard(LexState *ls, LHSVarList *lh, const ExpDesc *v)
  2148. {
  2149.     FuncState *fs = ls->fs;
  2150.     BCReg reg = v->u.s.info; /* Check against this variable. */
  2151.     BCReg tmp = fs->freereg; /* Rename to this temp. register(if needed) */
  2152.     int hazard = 0;

  2153.     for (; lh; lh = lh->prev) {
  2154.         if (lh->v.k == VINDEXED) {
  2155.             if (lh->v.u.s.info == reg) {  /* t[i], t = 1, 2 */
  2156.                 hazard = 1;
  2157.                 lh->v.u.s.info = tmp;
  2158.             }
  2159.             if (lh->v.u.s.aux == reg) {  /* t[i], i = 1, 2 */
  2160.                 hazard = 1;
  2161.                 lh->v.u.s.aux = tmp;
  2162.             }
  2163.         }
  2164.     }
  2165.     if (hazard) {
  2166.         /* Rename conflicting variable. */
  2167.         bcemit_AD(fs, BC_MOV, tmp, reg);
  2168.         bcreg_reserve(fs, 1);
  2169.     }
  2170. }

  2171. /* Adjust LHS/RHS of an assignment. */
  2172. static void assign_adjust(LexState *ls, BCReg nvars, BCReg nexps, ExpDesc *e)
  2173. {
  2174.     FuncState *fs = ls->fs;
  2175.     int32_t extra = (int32_t)nvars - (int32_t)nexps;

  2176.     if (e->k == VCALL) {
  2177.         extra++;  /* Compensate for the VCALL itself. */
  2178.         if (extra < 0)
  2179.             extra = 0;
  2180.         setbc_b(bcptr(fs, e), extra+1);  /* Fixup call results. */
  2181.         if (extra > 1)
  2182.             bcreg_reserve(fs, (BCReg)extra-1);
  2183.     } else {
  2184.         if (e->k != VVOID)
  2185.             expr_tonextreg(fs, e);  /* Close last expression. */
  2186.         if (extra > 0) {  /* Leftover LHS are set to nil. */
  2187.             BCReg reg = fs->freereg;
  2188.             bcreg_reserve(fs, (BCReg)extra);
  2189.             bcemit_nil(fs, reg, (BCReg)extra);
  2190.         }
  2191.     }
  2192. }

  2193. /* Recursively parse assignment statement. */
  2194. static void parse_assignment(LexState *ls, LHSVarList *lh, BCReg nvars)
  2195. {
  2196.     ExpDesc e;

  2197.     checkcond(ls, VLOCAL <= lh->v.k && lh->v.k <= VINDEXED,
  2198.             KP_ERR_XSYNTAX);
  2199.     if (lex_opt(ls, ',')) {  /* Collect LHS list and recurse upwards. */
  2200.         LHSVarList vl;
  2201.         vl.prev = lh;
  2202.         expr_primary(ls, &vl.v);
  2203.         if (vl.v.k == VLOCAL)
  2204.             assign_hazard(ls, lh, &vl.v);
  2205.         checklimit(ls->fs, ls->level + nvars, KP_MAX_XLEVEL,
  2206.                 "variable names");
  2207.         parse_assignment(ls, &vl, nvars+1);
  2208.     } else {  /* Parse RHS. */
  2209.         BCReg nexps;
  2210.         int assign_incr = 1;

  2211.         if (lex_opt(ls, '='))
  2212.             assign_incr = 0;
  2213.         else if (lex_opt(ls, TK_incr))
  2214.             assign_incr = 1;
  2215.         else
  2216.             err_syntax(ls, KP_ERR_XSYMBOL);

  2217.         nexps = expr_list(ls, &e);
  2218.         if (nexps == nvars) {
  2219.             if (e.k == VCALL) {
  2220.                 /* Vararg assignment. */
  2221.                 if (bc_op(*bcptr(ls->fs, &e)) == BC_VARG) {
  2222.                     ls->fs->freereg--;
  2223.                     e.k = VRELOCABLE;
  2224.                 } else {  /* Multiple call results. */
  2225.                     /* Base of call is not relocatable. */
  2226.                     e.u.s.info = e.u.s.aux;
  2227.                     e.k = VNONRELOC;
  2228.                 }
  2229.             }
  2230.             if (assign_incr == 0)
  2231.                 bcemit_store(ls->fs, &lh->v, &e);
  2232.             else
  2233.                 bcemit_store_incr(ls->fs, &lh->v, &e);
  2234.             return;
  2235.         }
  2236.         assign_adjust(ls, nvars, nexps, &e);
  2237.         if (nexps > nvars) {
  2238.             /* Drop leftover regs. */
  2239.             ls->fs->freereg -= nexps - nvars;
  2240.         }
  2241.     }
  2242.     /* Assign RHS to LHS and recurse downwards. */
  2243.     expr_init(&e, VNONRELOC, ls->fs->freereg-1);
  2244.     bcemit_store(ls->fs, &lh->v, &e);
  2245. }

  2246. /* Parse call statement or assignment. */
  2247. static void parse_call_assign(LexState *ls)
  2248. {
  2249.     FuncState *fs = ls->fs;
  2250.     LHSVarList vl;

  2251.     expr_primary(ls, &vl.v);
  2252.     if (vl.v.k == VCALL) {  /* Function call statement. */
  2253.         setbc_b(bcptr(fs, &vl.v), 1);  /* No results. */
  2254.     } else {  /* Start of an assignment. */
  2255.         vl.prev = NULL;
  2256.         parse_assignment(ls, &vl, 1);
  2257.     }
  2258. }

  2259. /* Parse 'var'(local in lua) statement. */
  2260. static void parse_local(LexState *ls)
  2261. {
  2262.     if (lex_opt(ls, TK_function)) {  /* Local function declaration. */
  2263.         ExpDesc v, b;
  2264.         FuncState *fs = ls->fs;
  2265.         var_new(ls, 0, lex_str(ls));
  2266.         expr_init(&v, VLOCAL, fs->freereg);
  2267.         v.u.s.aux = fs->varmap[fs->freereg];
  2268.         bcreg_reserve(fs, 1);
  2269.         var_add(ls, 1);
  2270.         parse_body(ls, &b, 0, ls->linenumber);
  2271.         /* bcemit_store(fs, &v, &b) without setting VSTACK_VAR_RW. */
  2272.         expr_free(fs, &b);
  2273.         expr_toreg(fs, &b, v.u.s.info);
  2274.         /* The upvalue is in scope, but the local is only valid
  2275.          * after the store. */
  2276.         var_get(ls, fs, fs->nactvar - 1).startpc = fs->pc;
  2277.     } else {  /* Local variable declaration. */
  2278.         ExpDesc e;
  2279.         BCReg nexps, nvars = 0;
  2280.         do {  /* Collect LHS. */
  2281.             var_new(ls, nvars++, lex_str(ls));
  2282.         } while (lex_opt(ls, ','));
  2283.         if (lex_opt(ls, '=')) {  /* Optional RHS. */
  2284.             nexps = expr_list(ls, &e);
  2285.         } else {  /* Or implicitly set to nil. */
  2286.             e.k = VVOID;
  2287.             nexps = 0;
  2288.         }
  2289.         assign_adjust(ls, nvars, nexps, &e);
  2290.         var_add(ls, nvars);
  2291.     }
  2292. }

  2293. /* Parse 'function' statement. */
  2294. static void parse_func(LexState *ls, BCLine line)
  2295. {
  2296.     FuncState *fs = ls->fs;
  2297.     ExpDesc v, b;

  2298.     kp_lex_next(ls);  /* Skip 'function'. */

  2299.     /* function is declared as local */
  2300. #if 1
  2301.     var_new(ls, 0, lex_str(ls));
  2302.     expr_init(&v, VLOCAL, fs->freereg);
  2303.     v.u.s.aux = fs->varmap[fs->freereg];
  2304.     bcreg_reserve(fs, 1);
  2305.     var_add(ls, 1);
  2306.     parse_body(ls, &b, 0, ls->linenumber);
  2307.     /* bcemit_store(fs, &v, &b) without setting VSTACK_VAR_RW. */
  2308.     expr_free(fs, &b);
  2309.     expr_toreg(fs, &b, v.u.s.info);
  2310.     /* The upvalue is in scope, but the local is only valid
  2311.      * after the store. */
  2312.     var_get(ls, fs, fs->nactvar - 1).startpc = fs->pc;

  2313. #else
  2314.     int needself = 0;

  2315.     /* Parse function name. */
  2316.     var_lookup(ls, &v);
  2317.     while (ls->tok == '.')  /* Multiple dot-separated fields. */
  2318.         expr_field(ls, &v);
  2319.     if (ls->tok == ':') {  /* Optional colon to signify method call. */
  2320.         needself = 1;
  2321.         expr_field(ls, &v);
  2322.     }
  2323.     parse_body(ls, &b, needself, line);
  2324.     fs = ls->fs;
  2325.     bcemit_store(fs, &v, &b);
  2326.     fs->bcbase[fs->pc - 1].line = line;  /* Set line for the store. */
  2327. #endif
  2328. }

  2329. /* -- Control transfer statements ----------------------------------------- */

  2330. /* Check for end of block. */
  2331. static int parse_isend(LexToken tok)
  2332. {
  2333.     switch (tok) {
  2334.     case TK_else: case TK_elseif: case TK_end: case TK_until: case TK_eof:
  2335.     case '}':
  2336.         return 1;
  2337.     default:
  2338.         return 0;
  2339.     }
  2340. }

  2341. /* Parse 'return' statement. */
  2342. static void parse_return(LexState *ls)
  2343. {
  2344.     BCIns ins;
  2345.     FuncState *fs = ls->fs;

  2346.     kp_lex_next(ls);  /* Skip 'return'. */
  2347.     fs->flags |= PROTO_HAS_RETURN;
  2348.     if (parse_isend(ls->tok) || ls->tok == ';') {  /* Bare return. */
  2349.         ins = BCINS_AD(BC_RET0, 0, 1);
  2350.     } else {  /* Return with one or more values. */
  2351.         ExpDesc e;  /* Receives the _last_ expression in the list. */
  2352.         BCReg nret = expr_list(ls, &e);
  2353.         if (nret == 1) {  /* Return one result. */
  2354.             if (e.k == VCALL) {  /* Check for tail call. */
  2355.                 BCIns *ip = bcptr(fs, &e);
  2356.                 /* It doesn't pay off to add BC_VARGT just
  2357.                  * for 'return ...'. */
  2358.                 if (bc_op(*ip) == BC_VARG)
  2359.                     goto notailcall;
  2360.                 fs->pc--;
  2361.                 ins = BCINS_AD(bc_op(*ip)-BC_CALL+BC_CALLT,
  2362.                         bc_a(*ip), bc_c(*ip));
  2363.             } else { /* Can return the result from any register. */
  2364.                 ins = BCINS_AD(BC_RET1,
  2365.                     expr_toanyreg(fs, &e), 2);
  2366.             }
  2367.         } else {
  2368.             if (e.k == VCALL) {/* Append all results from a call */
  2369. notailcall:
  2370.                 setbc_b(bcptr(fs, &e), 0);
  2371.                 ins = BCINS_AD(BC_RETM, fs->nactvar,
  2372.                         e.u.s.aux - fs->nactvar);
  2373.             } else {
  2374.                 /* Force contiguous registers. */
  2375.                 expr_tonextreg(fs, &e);
  2376.                 ins = BCINS_AD(BC_RET, fs->nactvar, nret+1);
  2377.             }
  2378.         }
  2379.     }
  2380.     if (fs->flags & PROTO_CHILD) {
  2381.         /* May need to close upvalues first. */
  2382.         bcemit_AJ(fs, BC_UCLO, 0, 0);
  2383.     }
  2384.     bcemit_INS(fs, ins);
  2385. }

  2386. /* Parse 'break' statement. */
  2387. static void parse_break(LexState *ls)
  2388. {
  2389.     ls->fs->bl->flags |= FSCOPE_BREAK;
  2390.     gola_new(ls, NAME_BREAK, VSTACK_GOTO, bcemit_jmp(ls->fs));
  2391. }

  2392. /* Parse label. */
  2393. static void parse_label(LexState *ls)
  2394. {
  2395.     FuncState *fs = ls->fs;
  2396.     ktap_str_t *name;
  2397.     int idx;

  2398.     fs->lasttarget = fs->pc;
  2399.     fs->bl->flags |= FSCOPE_GOLA;
  2400.     kp_lex_next(ls);  /* Skip '::'. */
  2401.     name = lex_str(ls);
  2402.     if (gola_findlabel(ls, name))
  2403.         kp_lex_error(ls, 0, KP_ERR_XLDUP, getstr(name));
  2404.     idx = gola_new(ls, name, VSTACK_LABEL, fs->pc);
  2405.     lex_check(ls, TK_label);
  2406.     /* Recursively parse trailing statements: labels and ';'. */
  2407.     for (;;) {
  2408.         if (ls->tok == TK_label) {
  2409.             synlevel_begin(ls);
  2410.             parse_label(ls);
  2411.             synlevel_end(ls);
  2412.         } else if (ls->tok == ';') {
  2413.             kp_lex_next(ls);
  2414.         } else {
  2415.             break;
  2416.         }
  2417.     }
  2418.     /* Trailing label is considered to be outside of scope. */
  2419.     if (parse_isend(ls->tok) && ls->tok != TK_until)
  2420.         ls->vstack[idx].slot = fs->bl->nactvar;
  2421.     gola_resolve(ls, fs->bl, idx);
  2422. }

  2423. /* -- Blocks, loops and conditional statements ---------------------------- */

  2424. /* Parse a block. */
  2425. static void parse_block(LexState *ls)
  2426. {
  2427.     FuncState *fs = ls->fs;
  2428.     FuncScope bl;

  2429.     fscope_begin(fs, &bl, 0);
  2430.     parse_chunk(ls);
  2431.     fscope_end(fs);
  2432. }

  2433. /* Parse 'while' statement. */
  2434. static void parse_while(LexState *ls, BCLine line)
  2435. {
  2436.     FuncState *fs = ls->fs;
  2437.     BCPos start, loop, condexit;
  2438.     FuncScope bl;

  2439.     kp_lex_next(ls);  /* Skip 'while'. */
  2440.     start = fs->lasttarget = fs->pc;
  2441.     condexit = expr_cond(ls);
  2442.     fscope_begin(fs, &bl, FSCOPE_LOOP);
  2443.     //lex_check(ls, TK_do);
  2444.     lex_check(ls, '{');
  2445.     loop = bcemit_AD(fs, BC_LOOP, fs->nactvar, 0);
  2446.     parse_block(ls);
  2447.     jmp_patch(fs, bcemit_jmp(fs), start);
  2448.     //lex_match(ls, TK_end, TK_while, line);
  2449.     lex_check(ls, '}');
  2450.     fscope_end(fs);
  2451.     jmp_tohere(fs, condexit);
  2452.     jmp_patchins(fs, loop, fs->pc);
  2453. }

  2454. /* Parse 'repeat' statement. */
  2455. static void parse_repeat(LexState *ls, BCLine line)
  2456. {
  2457.     FuncState *fs = ls->fs;
  2458.     BCPos loop = fs->lasttarget = fs->pc;
  2459.     BCPos condexit;
  2460.     FuncScope bl1, bl2;

  2461.     fscope_begin(fs, &bl1, FSCOPE_LOOP);  /* Breakable loop scope. */
  2462.     fscope_begin(fs, &bl2, 0);  /* Inner scope. */
  2463.     kp_lex_next(ls);  /* Skip 'repeat'. */
  2464.     bcemit_AD(fs, BC_LOOP, fs->nactvar, 0);
  2465.     parse_chunk(ls);
  2466.     lex_match(ls, TK_until, TK_repeat, line);
  2467.     /* Parse condition (still inside inner scope). */
  2468.     condexit = expr_cond(ls);
  2469.     /* No upvalues? Just end inner scope. */
  2470.     if (!(bl2.flags & FSCOPE_UPVAL)) {
  2471.         fscope_end(fs);
  2472.     } else {
  2473.         /* Otherwise generate: cond: UCLO+JMP out,
  2474.          * !cond: UCLO+JMP loop. */
  2475.         parse_break(ls);  /* Break from loop and close upvalues. */
  2476.         jmp_tohere(fs, condexit);
  2477.         fscope_end(fs);  /* End inner scope and close upvalues. */
  2478.         condexit = bcemit_jmp(fs);
  2479.     }
  2480.     jmp_patch(fs, condexit, loop);  /* Jump backwards if !cond. */
  2481.     jmp_patchins(fs, loop, fs->pc);
  2482.     fscope_end(fs);  /* End loop scope. */
  2483. }

  2484. /* Parse numeric 'for'. */
  2485. static void parse_for_num(LexState *ls, ktap_str_t *varname, BCLine line)
  2486. {
  2487.     FuncState *fs = ls->fs;
  2488.     BCReg base = fs->freereg;
  2489.     FuncScope bl;
  2490.     BCPos loop, loopend;

  2491.     /* Hidden control variables. */
  2492.     var_new_fixed(ls, FORL_IDX, VARNAME_FOR_IDX);
  2493.     var_new_fixed(ls, FORL_STOP, VARNAME_FOR_STOP);
  2494.     var_new_fixed(ls, FORL_STEP, VARNAME_FOR_STEP);
  2495.     /* Visible copy of index variable. */
  2496.     var_new(ls, FORL_EXT, varname);
  2497.     lex_check(ls, '=');
  2498.     expr_next(ls);
  2499.     lex_check(ls, ',');
  2500.     expr_next(ls);
  2501.     if (lex_opt(ls, ',')) {
  2502.         expr_next(ls);
  2503.     } else {
  2504.         /* Default step is 1. */
  2505.         bcemit_AD(fs, BC_KSHORT, fs->freereg, 1);
  2506.         bcreg_reserve(fs, 1);
  2507.     }
  2508.     var_add(ls, 3);  /* Hidden control variables. */
  2509.     //lex_check(ls, TK_do);
  2510.     lex_check(ls, ')');
  2511.     lex_check(ls, '{');
  2512.     loop = bcemit_AJ(fs, BC_FORI, base, NO_JMP);
  2513.     fscope_begin(fs, &bl, 0);  /* Scope for visible variables. */
  2514.     var_add(ls, 1);
  2515.     bcreg_reserve(fs, 1);
  2516.     parse_block(ls);
  2517.     fscope_end(fs);
  2518.     /* Perform loop inversion. Loop control instructions are at the end. */
  2519.     loopend = bcemit_AJ(fs, BC_FORL, base, NO_JMP);
  2520.     fs->bcbase[loopend].line = line;  /* Fix line for control ins. */
  2521.     jmp_patchins(fs, loopend, loop+1);
  2522.     jmp_patchins(fs, loop, fs->pc);
  2523. }

  2524. /*
  2525.  * Try to predict whether the iterator is next() and specialize the bytecode.
  2526.  * Detecting next() and pairs() by name is simplistic, but quite effective.
  2527.  * The interpreter backs off if the check for the closure fails at runtime.
  2528.  */
  2529. static int predict_next(LexState *ls, FuncState *fs, BCPos pc)
  2530. {
  2531.     BCIns ins = fs->bcbase[pc].ins;
  2532.     ktap_str_t *name;
  2533.     const ktap_val_t *o;

  2534.     switch (bc_op(ins)) {
  2535.     case BC_MOV:
  2536.         name = var_get(ls, fs, bc_d(ins)).name;
  2537.         break;
  2538.     case BC_UGET:
  2539.         name = ls->vstack[fs->uvmap[bc_d(ins)]].name;
  2540.         break;
  2541.     case BC_GGET:
  2542.         /* There's no inverse index (yet), so lookup the strings. */
  2543.         o = kp_tab_getstr(fs->kt, kp_str_newz("pairs"));
  2544.         if (o && tvhaskslot(o) && tvkslot(o) == bc_d(ins))
  2545.             return 1;
  2546.         o = kp_tab_getstr(fs->kt, kp_str_newz("next"));
  2547.         if (o && tvhaskslot(o) && tvkslot(o) == bc_d(ins))
  2548.             return 1;
  2549.         return 0;
  2550.     default:
  2551.         return 0;
  2552.     }

  2553.     return (name->len == 5 && !strcmp(getstr(name), "pairs")) ||
  2554.         (name->len == 4 && !strcmp(getstr(name), "next"));
  2555. }

  2556. /* Parse 'for' iterator. */
  2557. static void parse_for_iter(LexState *ls, ktap_str_t *indexname)
  2558. {
  2559.     FuncState *fs = ls->fs;
  2560.     ExpDesc e;
  2561.     BCReg nvars = 0;
  2562.     BCLine line;
  2563.     BCReg base = fs->freereg + 3;
  2564.     BCPos loop, loopend, exprpc = fs->pc;
  2565.     FuncScope bl;
  2566.     int isnext;

  2567.     /* Hidden control variables. */
  2568.     var_new_fixed(ls, nvars++, VARNAME_FOR_GEN);
  2569.     var_new_fixed(ls, nvars++, VARNAME_FOR_STATE);
  2570.     var_new_fixed(ls, nvars++, VARNAME_FOR_CTL);

  2571.     /* Visible variables returned from iterator. */
  2572.     var_new(ls, nvars++, indexname);
  2573.     while (lex_opt(ls, ','))
  2574.         var_new(ls, nvars++, lex_str(ls));
  2575.     lex_check(ls, TK_in);
  2576.     line = ls->linenumber;
  2577.     assign_adjust(ls, 3, expr_list(ls, &e), &e);
  2578.     /* The iterator needs another 3 slots (func + 2 args). */
  2579.     bcreg_bump(fs, 3);
  2580.     isnext = (nvars <= 5 && predict_next(ls, fs, exprpc));
  2581.     var_add(ls, 3);  /* Hidden control variables. */
  2582.     //lex_check(ls, TK_do);
  2583.     lex_check(ls, ')');
  2584.     lex_check(ls, '{');
  2585.     loop = bcemit_AJ(fs, isnext ? BC_ISNEXT : BC_JMP, base, NO_JMP);
  2586.     fscope_begin(fs, &bl, 0);  /* Scope for visible variables. */
  2587.     var_add(ls, nvars-3);
  2588.     bcreg_reserve(fs, nvars-3);
  2589.     parse_block(ls);
  2590.     fscope_end(fs);
  2591.     /* Perform loop inversion. Loop control instructions are at the end. */
  2592.     jmp_patchins(fs, loop, fs->pc);
  2593.     bcemit_ABC(fs, isnext ? BC_ITERN : BC_ITERC, base, nvars-3+1, 2+1);
  2594.     loopend = bcemit_AJ(fs, BC_ITERL, base, NO_JMP);
  2595.     fs->bcbase[loopend-1].line = line;  /* Fix line for control ins. */
  2596.     fs->bcbase[loopend].line = line;
  2597.     jmp_patchins(fs, loopend, loop+1);
  2598. }

  2599. /* Parse 'for' statement. */
  2600. static void parse_for(LexState *ls, BCLine line)
  2601. {
  2602.     FuncState *fs = ls->fs;
  2603.     ktap_str_t *varname;
  2604.     FuncScope bl;

  2605.     fscope_begin(fs, &bl, FSCOPE_LOOP);
  2606.     kp_lex_next(ls);  /* Skip 'for'. */
  2607.     lex_check(ls, '(');
  2608.     varname = lex_str(ls);  /* Get first variable name. */
  2609.     if (ls->tok == '=')
  2610.         parse_for_num(ls, varname, line);
  2611.     else if (ls->tok == ',' || ls->tok == TK_in)
  2612.         parse_for_iter(ls, varname);
  2613.     else
  2614.         err_syntax(ls, KP_ERR_XFOR);
  2615.     //lex_check(ls, '}');
  2616.     //lex_match(ls, TK_end, TK_for, line);
  2617.     lex_match(ls, '}', TK_for, line);
  2618.     fscope_end(fs);  /* Resolve break list. */
  2619. }

  2620. /* Parse condition and 'then' block. */
  2621. static BCPos parse_then(LexState *ls)
  2622. {
  2623.     BCPos condexit;
  2624.     kp_lex_next(ls);  /* Skip 'if' or 'elseif'. */
  2625.     condexit = expr_cond(ls);
  2626.     lex_check(ls, '{');
  2627.     parse_block(ls);
  2628.     lex_check(ls, '}');
  2629.     return condexit;
  2630. }

  2631. /* Parse 'if' statement. */
  2632. static void parse_if(LexState *ls, BCLine line)
  2633. {
  2634.     FuncState *fs = ls->fs;
  2635.     BCPos flist;
  2636.     BCPos escapelist = NO_JMP;
  2637.     flist = parse_then(ls);
  2638.     while (ls->tok == TK_elseif) {  /* Parse multiple 'elseif' blocks. */
  2639.         jmp_append(fs, &escapelist, bcemit_jmp(fs));
  2640.         jmp_tohere(fs, flist);
  2641.         flist = parse_then(ls);
  2642.     }
  2643.     if (ls->tok == TK_else) {  /* Parse optional 'else' block. */
  2644.         jmp_append(fs, &escapelist, bcemit_jmp(fs));
  2645.         jmp_tohere(fs, flist);
  2646.         kp_lex_next(ls);  /* Skip 'else'. */
  2647.         lex_check(ls, '{');
  2648.         parse_block(ls);
  2649.         lex_check(ls, '}');
  2650.     } else {
  2651.         jmp_append(fs, &escapelist, flist);
  2652.     }
  2653.     jmp_tohere(fs, escapelist);
  2654.     //lex_match(ls, TK_end, TK_if, line);
  2655. }

  2656. /* Parse 'trace' and 'trace_end' statement. */
  2657. static void parse_trace(LexState *ls)
  2658. {
  2659.     ExpDesc v, key, args;
  2660.     ktap_str_t *kdebug_str = kp_str_newz("kdebug");
  2661.     ktap_str_t *probe_str = kp_str_newz("trace_by_id");
  2662.     ktap_str_t *probe_end_str = kp_str_newz("trace_end");
  2663.     FuncState *fs = ls->fs;
  2664.     int token = ls->tok;
  2665.     BCIns ins;
  2666.     BCReg base;
  2667.     BCLine line = ls->linenumber;

  2668.     if (token == TK_trace)
  2669.         kp_lex_read_string_until(ls, '{');
  2670.     else
  2671.         kp_lex_next(ls);  /* skip "trace_end" keyword */

  2672.     /* kdebug */
  2673.     expr_init(&v, VGLOBAL, 0);
  2674.     v.u.sval = kdebug_str;
  2675.     expr_toanyreg(fs, &v);

  2676.     /* fieldsel: kdebug.probe */
  2677.     expr_init(&key, VKSTR, 0);
  2678.     key.u.sval = token == TK_trace ? probe_str : probe_end_str;
  2679.     expr_index(fs, &v, &key);

  2680.     /* funcargs*/
  2681.     expr_tonextreg(fs, &v);

  2682.     if (token == TK_trace) {
  2683.         ktap_eventdesc_t *evdef_info;
  2684.         const char *str;

  2685.         /* argument: EVENTDEF string */
  2686.         lex_check(ls, TK_string);
  2687.         str = svalue(&ls->tokval);
  2688.         evdef_info = kp_parse_events(str);
  2689.         if (!evdef_info)
  2690.             kp_lex_error(ls, 0, KP_ERR_XEVENTDEF, str);


  2691.         /* pass a userspace pointer to kernel */
  2692.         expr_init(&args, VKNUM, 0);
  2693.         set_number(&args.u.nval, (ktap_number)evdef_info);

  2694.         expr_tonextreg(fs, &args);
  2695.     }

  2696.     /* argument: callback function */
  2697.     parse_body_no_args(ls, &args, 0, ls->linenumber);

  2698.     expr_tonextreg(fs, &args);

  2699.     base = v.u.s.info;  /* base register for call */
  2700.     ins = BCINS_ABC(BC_CALL, base, 2, fs->freereg - base);

  2701.     expr_init(&v, VCALL, bcemit_INS(fs, ins));
  2702.     v.u.s.aux = base;
  2703.     fs->bcbase[fs->pc - 1].line = line;
  2704.     fs->freereg = base+1;  /* Leave one result by default. */

  2705.     setbc_b(bcptr(fs, &v), 1);  /* No results. */
  2706. }


  2707. /* Parse 'profile' and 'tick' statement. */
  2708. static void parse_timer(LexState *ls)
  2709. {
  2710.     FuncState *fs = ls->fs;
  2711.     ExpDesc v, key, args;
  2712.     ktap_str_t *token_str = rawtsvalue(&ls->tokval);
  2713.     ktap_str_t *interval_str;
  2714.     BCLine line = ls->linenumber;
  2715.     BCIns ins;
  2716.     BCReg base;

  2717.     kp_lex_next(ls);  /* skip '-' */

  2718.     kp_lex_read_string_until(ls, '{');
  2719.     interval_str = rawtsvalue(&ls->tokval);
  2720.     lex_check(ls, TK_string);

  2721.     /* timer */
  2722.     expr_init(&v, VGLOBAL, 0);
  2723.     v.u.sval = kp_str_newz("timer");
  2724.     expr_toanyreg(fs, &v);

  2725.     /* fieldsel: timer.profile, timer.tick */
  2726.     expr_init(&key, VKSTR, 0);
  2727.     key.u.sval = token_str;
  2728.     expr_index(fs, &v, &key);

  2729.     /* funcargs*/
  2730.     expr_tonextreg(fs, &v);

  2731.     /* argument: interval string */
  2732.     expr_init(&args, VKSTR, 0);
  2733.     args.u.sval = interval_str;

  2734.     expr_tonextreg(fs, &args);

  2735.     /* argument: callback function */
  2736.     parse_body_no_args(ls, &args, 0, ls->linenumber);

  2737.     expr_tonextreg(fs, &args);

  2738.     base = v.u.s.info;  /* base register for call */
  2739.     ins = BCINS_ABC(BC_CALL, base, 2, fs->freereg - base);

  2740.     expr_init(&v, VCALL, bcemit_INS(fs, ins));
  2741.     v.u.s.aux = base;
  2742.     fs->bcbase[fs->pc - 1].line = line;
  2743.     fs->freereg = base+1;  /* Leave one result by default. */

  2744.     setbc_b(bcptr(fs, &v), 1);  /* No results. */
  2745. }

  2746. /* -- Parse statements ---------------------------------------------------- */

  2747. /* Parse a statement. Returns 1 if it must be the last one in a chunk. */
  2748. static int parse_stmt(LexState *ls)
  2749. {
  2750.     BCLine line = ls->linenumber;
  2751.     switch (ls->tok) {
  2752.     case TK_if:
  2753.         parse_if(ls, line);
  2754.         break;
  2755.     case TK_while:
  2756.         parse_while(ls, line);
  2757.         break;
  2758.     case TK_do:
  2759.         kp_lex_next(ls);
  2760.         parse_block(ls);
  2761.         lex_match(ls, TK_end, TK_do, line);
  2762.         break;
  2763.     case TK_for:
  2764.         parse_for(ls, line);
  2765.         break;
  2766.     case TK_repeat:
  2767.         parse_repeat(ls, line);
  2768.         break;
  2769.     case TK_function:
  2770.         parse_func(ls, line);
  2771.         break;
  2772.     case TK_local:
  2773.         kp_lex_next(ls);
  2774.         parse_local(ls);
  2775.         break;
  2776.     case TK_return:
  2777.         parse_return(ls);
  2778.         return 1;  /* Must be last. */
  2779.     case TK_break:
  2780.         kp_lex_next(ls);
  2781.         parse_break(ls);
  2782.         return 0;  /* Must be last. */
  2783.     case ';':
  2784.         kp_lex_next(ls);
  2785.         break;
  2786.     case TK_label:
  2787.         parse_label(ls);
  2788.         break;
  2789.     case TK_trace:
  2790.     case TK_trace_end:
  2791.         parse_trace(ls);
  2792.         break;
  2793.     case TK_profile:
  2794.     case TK_tick:
  2795.         parse_timer(ls);
  2796.         break;
  2797.     default:
  2798.         parse_call_assign(ls);
  2799.         break;
  2800.     }
  2801.     return 0;
  2802. }

  2803. /* A chunk is a list of statements optionally separated by semicolons. */
  2804. static void parse_chunk(LexState *ls)
  2805. {
  2806.     int islast = 0;

  2807.     synlevel_begin(ls);
  2808.     while (!islast && !parse_isend(ls->tok)) {
  2809.         islast = parse_stmt(ls);
  2810.         lex_opt(ls, ';');
  2811.         kp_assert(ls->fs->framesize >= ls->fs->freereg &&
  2812.             ls->fs->freereg >= ls->fs->nactvar);
  2813.         /* Free registers after each stmt. */
  2814.         ls->fs->freereg = ls->fs->nactvar;
  2815.     }
  2816.     synlevel_end(ls);
  2817. }

  2818. /* Entry point of bytecode parser. */
  2819. ktap_proto_t *kp_parse(LexState *ls)
  2820. {
  2821.     FuncState fs;
  2822.     FuncScope bl;
  2823.     ktap_proto_t *pt;

  2824.     ls->chunkname = kp_str_newz(ls->chunkarg);
  2825.     ls->level = 0;
  2826.     fs_init(ls, &fs);
  2827.     fs.linedefined = 0;
  2828.     fs.numparams = 0;
  2829.     fs.bcbase = NULL;
  2830.     fs.bclim = 0;
  2831.     fs.flags |= PROTO_VARARG;  /* Main chunk is always a vararg func. */
  2832.     fscope_begin(&fs, &bl, 0);
  2833.     bcemit_AD(&fs, BC_FUNCV, 0, 0);  /* Placeholder. */
  2834.     kp_lex_next(ls);  /* Read-ahead first token. */
  2835.     parse_chunk(ls);
  2836.     if (ls->tok != TK_eof)
  2837.         err_token(ls, TK_eof);
  2838.     pt = fs_finish(ls, ls->linenumber);
  2839.     kp_assert(fs.prev == NULL);
  2840.     kp_assert(ls->fs == NULL);
  2841.     kp_assert(pt->sizeuv == 0);
  2842.     return pt;
  2843. }