duktape-1.5.2/src-separate/duk_js_call.c
1/*
2 * Call handling.
3 *
4 * Main functions are:
5 *
6 * - duk_handle_call_unprotected(): unprotected call to Ecmascript or
7 * Duktape/C function
8 * - duk_handle_call_protected(): protected call to Ecmascript or
9 * Duktape/C function
10 * - duk_handle_safe_call(): make a protected C call within current
11 * activation
12 * - duk_handle_ecma_call_setup(): Ecmascript-to-Ecmascript calls
13 * (not always possible), including tail calls and coroutine resume
14 *
15 * See 'execution.rst'.
16 *
17 * Note: setjmp() and local variables have a nasty interaction,
18 * see execution.rst; non-volatile locals modified after setjmp()
19 * call are not guaranteed to keep their value.
20 */
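/* Illustrative addition (not part of the original source): from an embedding's
 * perspective these internal handlers sit behind the public call APIs, e.g.
 * duk_call()/duk_pcall() (unprotected/protected calls) and duk_safe_call()
 * (protected C call within the current activation). A minimal protected call
 * sketch, assuming an initialized 'ctx' (function name is illustrative):
 */
#if 0
static void example_protected_call(duk_context *ctx) {
	duk_eval_string(ctx, "(function (x) { return x * 2; })");  /* [ ... func ] */
	duk_push_int(ctx, 21);                                     /* [ ... func 21 ] */
	if (duk_pcall(ctx, 1 /*nargs*/) == DUK_EXEC_SUCCESS) {
		/* [ ... retval ] */
	} else {
		/* [ ... errobj ] */
	}
	duk_pop(ctx);
}
#endif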
21
22#include "duk_internal.h"
23
24/*
25 * Forward declarations.
26 */
27
29 duk_idx_t num_stack_args,
30 duk_small_uint_t call_flags,
31 duk_idx_t idx_func);
33 duk_size_t entry_valstack_bottom_index,
34 duk_size_t entry_valstack_end,
35 duk_size_t entry_catchstack_top,
36 duk_size_t entry_callstack_top,
37 duk_int_t entry_call_recursion_depth,
38 duk_hthread *entry_curr_thread,
39 duk_uint_fast8_t entry_thread_state,
40 duk_instr_t **entry_ptr_curr_pc,
41 duk_idx_t idx_func,
42 duk_jmpbuf *old_jmpbuf_ptr);
45 duk_idx_t idx_retbase,
46 duk_idx_t num_stack_rets,
47 duk_size_t entry_valstack_bottom_index,
48 duk_size_t entry_callstack_top,
49 duk_size_t entry_catchstack_top);
51 duk_idx_t idx_retbase,
52 duk_idx_t num_stack_rets,
53 duk_size_t entry_valstack_bottom_index,
54 duk_size_t entry_callstack_top,
55 duk_size_t entry_catchstack_top,
56 duk_jmpbuf *old_jmpbuf_ptr);
58 duk_idx_t idx_retbase,
59 duk_idx_t num_stack_rets,
60 duk_int_t entry_call_recursion_depth,
61 duk_hthread *entry_curr_thread,
62 duk_uint_fast8_t entry_thread_state,
63 duk_instr_t **entry_ptr_curr_pc);
64
65/*
66 * Interrupt counter fixup (for development only).
67 */
68
69#if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
70DUK_LOCAL void duk__interrupt_fixup(duk_hthread *thr, duk_hthread *entry_curr_thread) {
71 /* Currently the bytecode executor and executor interrupt
72 * instruction counts are off because we don't execute the
73 * interrupt handler when we're about to exit from the initial
74 * user call into Duktape.
75 *
76 * If we were to execute the interrupt handler here, the counts
77 * would match. You can enable this block manually to check
78 * that this is the case.
79 */
80
81 DUK_ASSERT(thr != NULL);
82 DUK_ASSERT(thr->heap != NULL);
83
84#if defined(DUK_USE_INTERRUPT_DEBUG_FIXUP)
85 if (entry_curr_thread == NULL) {
86 thr->interrupt_init = thr->interrupt_init - thr->interrupt_counter;
87 thr->heap->inst_count_interrupt += thr->interrupt_init;
88 DUK_DD(DUK_DDPRINT("debug test: updated interrupt count on exit to "
89 "user code, instruction counts: executor=%ld, interrupt=%ld",
90 (long) thr->heap->inst_count_exec, (long) thr->heap->inst_count_interrupt));
91 DUK_ASSERT(thr->heap->inst_count_exec == thr->heap->inst_count_interrupt);
92 }
93#else
94 DUK_UNREF(thr);
95 DUK_UNREF(entry_curr_thread);
96#endif
97}
98#endif
99
100/*
101 * Arguments object creation.
102 *
103 * Creating arguments objects involves many small details, see E5 Section
104 * 10.6 for the specific requirements. Much of the arguments object exotic
105 * behavior is implemented in duk_hobject_props.c, and is enabled by the
106 * object flag DUK_HOBJECT_FLAG_EXOTIC_ARGUMENTS.
107 */
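/* Illustrative addition (not part of the original source): the parameter map
 * set up below is what makes non-strict 'arguments' entries alias the
 * corresponding formals. A sketch of the observable behavior through the C
 * API, assuming an initialized 'ctx' (function name is illustrative):
 */
#if 0
static void example_arguments_aliasing(duk_context *ctx) {
	/* Non-strict function: writing arguments[0] writes through to 'a'. */
	duk_eval_string(ctx, "(function (a) { arguments[0] = 123; return a; })('x')");
	/* Result on the stack top is 123; with 'use strict' it would stay 'x'. */
	duk_pop(ctx);
}
#endif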
108
110 duk_hobject *func,
111 duk_hobject *varenv,
112 duk_idx_t idx_argbase, /* idx of first argument on stack */
113 duk_idx_t num_stack_args) { /* num args starting from idx_argbase */
114 duk_context *ctx = (duk_context *) thr;
115 duk_hobject *arg; /* 'arguments' */
116 duk_hobject *formals; /* formals for 'func' (may be NULL if func is a C function) */
117 duk_idx_t i_arg;
118 duk_idx_t i_map;
119 duk_idx_t i_mappednames;
120 duk_idx_t i_formals;
121 duk_idx_t i_argbase;
122 duk_idx_t n_formals;
123 duk_idx_t idx;
124 duk_bool_t need_map;
125
126 DUK_DDD(DUK_DDDPRINT("creating arguments object for func=%!iO, varenv=%!iO, "
127 "idx_argbase=%ld, num_stack_args=%ld",
128 (duk_heaphdr *) func, (duk_heaphdr *) varenv,
129 (long) idx_argbase, (long) num_stack_args));
130
131 DUK_ASSERT(thr != NULL);
132 DUK_ASSERT(func != NULL);
134 DUK_ASSERT(varenv != NULL);
135 DUK_ASSERT(idx_argbase >= 0); /* assumed to be bottom relative */
136 DUK_ASSERT(num_stack_args >= 0);
137
138 need_map = 0;
139
140 i_argbase = idx_argbase;
141 DUK_ASSERT(i_argbase >= 0);
142
143 duk_push_hobject(ctx, func);
145 formals = duk_get_hobject(ctx, -1);
146 n_formals = 0;
147 if (formals) {
149 n_formals = (duk_idx_t) duk_require_int(ctx, -1);
150 duk_pop(ctx);
151 }
152 duk_remove(ctx, -2); /* leave formals on stack for later use */
153 i_formals = duk_require_top_index(ctx);
154
155 DUK_ASSERT(n_formals >= 0);
156 DUK_ASSERT(formals != NULL || n_formals == 0);
157
158 DUK_DDD(DUK_DDDPRINT("func=%!O, formals=%!O, n_formals=%ld",
159 (duk_heaphdr *) func, (duk_heaphdr *) formals,
160 (long) n_formals));
161
162 /* [ ... formals ] */
163
164 /*
165 * Create required objects:
166 * - 'arguments' object: array-like, but not an array
167 * - 'map' object: internal object, tied to 'arguments'
168 * - 'mappedNames' object: temporary value used during construction
169 */
170
171 i_arg = duk_push_object_helper(ctx,
176 DUK_ASSERT(i_arg >= 0);
177 arg = duk_require_hobject(ctx, -1);
178 DUK_ASSERT(arg != NULL);
179
180 i_map = duk_push_object_helper(ctx,
183 -1); /* no prototype */
184 DUK_ASSERT(i_map >= 0);
185
186 i_mappednames = duk_push_object_helper(ctx,
189 -1); /* no prototype */
190 DUK_ASSERT(i_mappednames >= 0);
191
192 /* [ ... formals arguments map mappedNames ] */
193
194 DUK_DDD(DUK_DDDPRINT("created arguments related objects: "
195 "arguments at index %ld -> %!O "
196 "map at index %ld -> %!O "
197 "mappednames at index %ld -> %!O",
198 (long) i_arg, (duk_heaphdr *) duk_get_hobject(ctx, i_arg),
199 (long) i_map, (duk_heaphdr *) duk_get_hobject(ctx, i_map),
200 (long) i_mappednames, (duk_heaphdr *) duk_get_hobject(ctx, i_mappednames)));
201
202 /*
203 * Init arguments properties, map, etc.
204 */
205
206 duk_push_int(ctx, num_stack_args);
208
209 /*
210 * Init argument related properties
211 */
212
213 /* step 11 */
214 idx = num_stack_args - 1;
215 while (idx >= 0) {
216 DUK_DDD(DUK_DDDPRINT("arg idx %ld, argbase=%ld, argidx=%ld",
217 (long) idx, (long) i_argbase, (long) (i_argbase + idx)));
218
219 DUK_DDD(DUK_DDDPRINT("define arguments[%ld]=arg", (long) idx));
220 duk_dup(ctx, i_argbase + idx);
221 duk_xdef_prop_index_wec(ctx, i_arg, (duk_uarridx_t) idx);
222 DUK_DDD(DUK_DDDPRINT("defined arguments[%ld]=arg", (long) idx));
223
224 /* step 11.c is relevant only if non-strict (checked in 11.c.ii) */
225 if (!DUK_HOBJECT_HAS_STRICT(func) && idx < n_formals) {
226 DUK_ASSERT(formals != NULL);
227
228 DUK_DDD(DUK_DDDPRINT("non-strict function, index within formals (%ld < %ld)",
229 (long) idx, (long) n_formals));
230
231 duk_get_prop_index(ctx, i_formals, idx);
232 DUK_ASSERT(duk_is_string(ctx, -1));
233
234 duk_dup(ctx, -1); /* [ ... name name ] */
235
236 if (!duk_has_prop(ctx, i_mappednames)) {
237 /* steps 11.c.ii.1 - 11.c.ii.4, but our internal book-keeping
238 * differs from the reference model
239 */
240
241 /* [ ... name ] */
242
243 need_map = 1;
244
245 DUK_DDD(DUK_DDDPRINT("set mappednames[%s]=%ld",
246 (const char *) duk_get_string(ctx, -1),
247 (long) idx));
248 duk_dup(ctx, -1); /* name */
249 duk_push_uint(ctx, (duk_uint_t) idx); /* index */
250 duk_to_string(ctx, -1);
251 duk_xdef_prop_wec(ctx, i_mappednames); /* out of spec, must be configurable */
252
253 DUK_DDD(DUK_DDDPRINT("set map[%ld]=%s",
254 (long) idx,
255 duk_get_string(ctx, -1)));
256 duk_dup(ctx, -1); /* name */
257 duk_xdef_prop_index_wec(ctx, i_map, (duk_uarridx_t) idx); /* out of spec, must be configurable */
258 } else {
259 /* duk_has_prop() popped the second 'name' */
260 }
261
262 /* [ ... name ] */
263 duk_pop(ctx); /* pop 'name' */
264 }
265
266 idx--;
267 }
268
269 DUK_DDD(DUK_DDDPRINT("actual arguments processed"));
270
271 /* step 12 */
272 if (need_map) {
273 DUK_DDD(DUK_DDDPRINT("adding 'map' and 'varenv' to arguments object"));
274
275 /* should never happen for a strict callee */
277
278 duk_dup(ctx, i_map);
279 duk_xdef_prop_stridx(ctx, i_arg, DUK_STRIDX_INT_MAP, DUK_PROPDESC_FLAGS_NONE); /* out of spec, don't care */
280
281 /* The variable environment for magic variable bindings needs to be
282 * given by the caller and recorded in the arguments object.
283 *
284 * See E5 Section 10.6, the creation of setters/getters.
285 *
286 * The variable environment also provides access to the callee, so
287 * an explicit (internal) callee property is not needed.
288 */
289
290 duk_push_hobject(ctx, varenv);
291 duk_xdef_prop_stridx(ctx, i_arg, DUK_STRIDX_INT_VARENV, DUK_PROPDESC_FLAGS_NONE); /* out of spec, don't care */
292 }
293
294 /* steps 13-14 */
295 if (DUK_HOBJECT_HAS_STRICT(func)) {
296 /* Callee/caller are throwers and are not deletable etc. They
297 * could be implemented as virtual properties, but currently
298 * there is no support for virtual properties which are accessors
299 * (only plain virtual properties). This would not be difficult
300 * to change in duk_hobject_props, but we can make the throwers
301 * normal, concrete properties just as easily.
302 *
303 * Note that the specification requires that the *same* thrower
304 * built-in object is used here! See E5 Section 10.6 main
305 * algorithm, step 14, and Section 13.2.3 which describes the
306 * thrower. See test case test-arguments-throwers.js.
307 */
308
309 DUK_DDD(DUK_DDDPRINT("strict function, setting caller/callee to throwers"));
310
313 } else {
314 DUK_DDD(DUK_DDDPRINT("non-strict function, setting callee to actual value"));
315 duk_push_hobject(ctx, func);
317 }
318
319 /* set exotic behavior only after we're done */
320 if (need_map) {
321 /* Exotic behaviors are only enabled for arguments objects
322 * which have a parameter map (see E5 Section 10.6 main
323 * algorithm, step 12).
324 *
325 * In particular, a non-strict arguments object with no
326 * mapped formals does *NOT* get exotic behavior, even
327 * for e.g. "caller" property. This seems counterintuitive
328 * but appears to be the case.
329 */
330
331 /* cannot be strict (never mapped variables) */
333
334 DUK_DDD(DUK_DDDPRINT("enabling exotic behavior for arguments object"));
336 } else {
337 DUK_DDD(DUK_DDDPRINT("not enabling exotic behavior for arguments object"));
338 }
339
340 DUK_DDD(DUK_DDDPRINT("final arguments related objects: "
341 "arguments at index %ld -> %!O "
342 "map at index %ld -> %!O "
343 "mappednames at index %ld -> %!O",
344 (long) i_arg, (duk_heaphdr *) duk_get_hobject(ctx, i_arg),
345 (long) i_map, (duk_heaphdr *) duk_get_hobject(ctx, i_map),
346 (long) i_mappednames, (duk_heaphdr *) duk_get_hobject(ctx, i_mappednames)));
347
348 /* [ args(n) [crud] formals arguments map mappednames ] */
349
350 duk_pop_2(ctx);
351 duk_remove(ctx, -2);
352
353 /* [ args [crud] arguments ] */
354}
355
356/* Helper for creating the arguments object and adding it to the env record
357 * on top of the value stack. This helper has a very strict dependency on
358 * the shape of the input stack.
359 */
361 duk_hobject *func,
362 duk_hobject *env,
363 duk_idx_t num_stack_args) {
364 duk_context *ctx = (duk_context *) thr;
365
366 DUK_DDD(DUK_DDDPRINT("creating arguments object for function call"));
367
368 DUK_ASSERT(thr != NULL);
369 DUK_ASSERT(func != NULL);
370 DUK_ASSERT(env != NULL);
372 DUK_ASSERT(duk_get_top(ctx) >= num_stack_args + 1);
373
374 /* [ ... arg1 ... argN envobj ] */
375
377 func,
378 env,
379 duk_get_top(ctx) - num_stack_args - 1, /* idx_argbase */
380 num_stack_args);
381
382 /* [ ... arg1 ... argN envobj argobj ] */
383
385 -2,
387 DUK_HOBJECT_HAS_STRICT(func) ? DUK_PROPDESC_FLAGS_E : /* strict: non-deletable, non-writable */
388 DUK_PROPDESC_FLAGS_WE); /* non-strict: non-deletable, writable */
389 /* [ ... arg1 ... argN envobj ] */
390}
391
392/*
393 * Helper for handling a "bound function" chain when a call is being made.
394 *
395 * Follows the bound function chain until a non-bound function is found.
396 * Prepends the bound arguments to the value stack (at idx_func + 2),
397 * updating 'num_stack_args' in the process. The 'this' binding is also
398 * updated if necessary (at idx_func + 1). Note that for constructor calls
399 * the 'this' binding is never updated by [[BoundThis]].
400 *
401 * XXX: bound function chains could be collapsed at bound function creation
402 * time so that each bound function would point directly to a non-bound
403 * function. This would make call time handling much easier.
404 */
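/* Illustrative addition (not part of the original source): this helper is
 * exercised whenever a Function.prototype.bind() result is called. A sketch,
 * assuming an initialized 'ctx' (function name is illustrative):
 */
#if 0
static void example_bound_call(duk_context *ctx) {
	duk_eval_string(ctx, "(function (a, b) { return a + b; }).bind(null, 1)");
	duk_push_int(ctx, 2);
	duk_call(ctx, 1);  /* bound argument 1 is prepended at call time -> 3 */
	duk_pop(ctx);
}
#endif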
405
407 duk_idx_t idx_func,
408 duk_idx_t *p_num_stack_args, /* may be changed by call */
409 duk_bool_t is_constructor_call) {
410 duk_context *ctx = (duk_context *) thr;
411 duk_idx_t num_stack_args;
412 duk_tval *tv_func;
413 duk_hobject *func;
414 duk_uint_t sanity;
415
416 DUK_ASSERT(thr != NULL);
417 DUK_ASSERT(p_num_stack_args != NULL);
418
419 /* On entry, item at idx_func is a bound, non-lightweight function,
420 * but we don't rely on that below.
421 */
422
423 num_stack_args = *p_num_stack_args;
424
426 do {
427 duk_idx_t i, len;
428
429 tv_func = duk_require_tval(ctx, idx_func);
430 DUK_ASSERT(tv_func != NULL);
431
432 if (DUK_TVAL_IS_LIGHTFUNC(tv_func)) {
433 /* Lightweight function: never bound, so terminate. */
434 break;
435 } else if (DUK_TVAL_IS_OBJECT(tv_func)) {
436 func = DUK_TVAL_GET_OBJECT(tv_func);
437 if (!DUK_HOBJECT_HAS_BOUND(func)) {
438 /* Normal non-bound function. */
439 break;
440 }
441 } else {
442 /* Function.prototype.bind() should never let this happen,
443 * so an ugly error message is enough.
444 */
446 }
448
449 /* XXX: this could be more compact by accessing the internal properties
450 * directly as own properties (they cannot be inherited, and are not
451 * externally visible).
452 */
453
454 DUK_DDD(DUK_DDDPRINT("bound function encountered, ptr=%p, num_stack_args=%ld: %!T",
455 (void *) DUK_TVAL_GET_OBJECT(tv_func), (long) num_stack_args, tv_func));
456
457 /* [ ... func this arg1 ... argN ] */
458
459 if (is_constructor_call) {
460 /* See: tests/ecmascript/test-spec-bound-constructor.js */
461 DUK_DDD(DUK_DDDPRINT("constructor call: don't update this binding"));
462 } else {
464 duk_replace(ctx, idx_func + 1); /* idx_this = idx_func + 1 */
465 }
466
467 /* [ ... func this arg1 ... argN ] */
468
469 /* XXX: duk_get_length? */
470 duk_get_prop_stridx(ctx, idx_func, DUK_STRIDX_INT_ARGS); /* -> [ ... func this arg1 ... argN _Args ] */
471 duk_get_prop_stridx(ctx, -1, DUK_STRIDX_LENGTH); /* -> [ ... func this arg1 ... argN _Args length ] */
472 len = (duk_idx_t) duk_require_int(ctx, -1);
473 duk_pop(ctx);
474 for (i = 0; i < len; i++) {
475 /* XXX: very slow - better to bulk allocate a gap, and copy
476 * from args_array directly (we know it has a compact array
477 * part, etc).
478 */
479
480 /* [ ... func this <some bound args> arg1 ... argN _Args ] */
481 duk_get_prop_index(ctx, -1, i);
482 duk_insert(ctx, idx_func + 2 + i); /* idx_args = idx_func + 2 */
483 }
484 num_stack_args += len; /* must be updated to work properly (e.g. creation of 'arguments') */
485 duk_pop(ctx);
486
487 /* [ ... func this <bound args> arg1 ... argN ] */
488
490 duk_replace(ctx, idx_func); /* replace in stack */
491
492 DUK_DDD(DUK_DDDPRINT("bound function handled, num_stack_args=%ld, idx_func=%ld, curr func=%!T",
493 (long) num_stack_args, (long) idx_func, duk_get_tval(ctx, idx_func)));
494 } while (--sanity > 0);
495
496 if (sanity == 0) {
498 }
499
500 DUK_DDD(DUK_DDDPRINT("final non-bound function is: %!T", duk_get_tval(ctx, idx_func)));
501
502#if defined(DUK_USE_ASSERTIONS)
503 tv_func = duk_require_tval(ctx, idx_func);
505 if (DUK_TVAL_IS_OBJECT(tv_func)) {
506 func = DUK_TVAL_GET_OBJECT(tv_func);
507 DUK_ASSERT(func != NULL);
511 }
512#endif
513
514 /* write back */
515 *p_num_stack_args = num_stack_args;
516}
517
518/*
519 * Helper for setting up var_env and lex_env of an activation,
520 * assuming it does NOT have the DUK_HOBJECT_FLAG_NEWENV flag.
521 */
522
524 duk_hobject *func,
525 duk_activation *act) {
526 duk_tval *tv;
527
528 DUK_ASSERT(thr != NULL);
529 DUK_ASSERT(func != NULL);
530 DUK_ASSERT(act != NULL);
533
535 if (tv) {
538 act->lex_env = DUK_TVAL_GET_OBJECT(tv);
539
541 if (tv) {
544 act->var_env = DUK_TVAL_GET_OBJECT(tv);
545 } else {
546 act->var_env = act->lex_env;
547 }
548 } else {
550 act->var_env = act->lex_env;
551 }
552
555}
556
557/*
558 * Helper for updating callee 'caller' property.
559 */
560
561#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
562DUK_LOCAL void duk__update_func_caller_prop(duk_hthread *thr, duk_hobject *func) {
563 duk_tval *tv_caller;
564 duk_hobject *h_tmp;
565 duk_activation *act_callee;
566 duk_activation *act_caller;
567
568 DUK_ASSERT(thr != NULL);
569 DUK_ASSERT(func != NULL);
570 DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(func)); /* bound chain resolved */
571 DUK_ASSERT(thr->callstack_top >= 1);
572
573 if (DUK_HOBJECT_HAS_STRICT(func)) {
574 /* Strict functions don't get their 'caller' updated. */
575 return;
576 }
577
578 act_callee = thr->callstack + thr->callstack_top - 1;
579 act_caller = (thr->callstack_top >= 2 ? act_callee - 1 : NULL);
580
581 /* XXX: check .caller writability? */
582
583 /* Backup 'caller' property and update its value. */
585 if (tv_caller) {
586 /* If caller is global/eval code, 'caller' should be set to
587 * 'null'.
588 *
589 * XXX: there is no exotic flag to infer this correctly now.
590 * The NEWENV flag is used now which works as intended for
591 * everything (global code, non-strict eval code, and functions)
592 * except strict eval code. Bound functions are never an issue
593 * because 'func' has been resolved to a non-bound function.
594 */
595
596 if (act_caller) {
597 /* act_caller->func may be NULL in some finalization cases,
598 * just treat like we don't know the caller.
599 */
600 if (act_caller->func && !DUK_HOBJECT_HAS_NEWENV(act_caller->func)) {
601 /* Setting to NULL causes 'caller' to be set to
602 * 'null' as desired.
603 */
604 act_caller = NULL;
605 }
606 }
607
608 if (DUK_TVAL_IS_OBJECT(tv_caller)) {
609 h_tmp = DUK_TVAL_GET_OBJECT(tv_caller);
610 DUK_ASSERT(h_tmp != NULL);
611 act_callee->prev_caller = h_tmp;
612
613 /* Previous value doesn't need refcount changes because its ownership
614 * is transferred to prev_caller.
615 */
616
617 if (act_caller) {
618 DUK_ASSERT(act_caller->func != NULL);
619 DUK_TVAL_SET_OBJECT(tv_caller, act_caller->func);
620 DUK_TVAL_INCREF(thr, tv_caller);
621 } else {
622 DUK_TVAL_SET_NULL(tv_caller); /* no incref */
623 }
624 } else {
625 /* 'caller' must only take on 'null' or function value */
627 DUK_ASSERT(act_callee->prev_caller == NULL);
628 if (act_caller && act_caller->func) {
629 /* Tolerate act_caller->func == NULL which happens in
630 * some finalization cases; treat like unknown caller.
631 */
632 DUK_TVAL_SET_OBJECT(tv_caller, act_caller->func);
633 DUK_TVAL_INCREF(thr, tv_caller);
634 } else {
635 DUK_TVAL_SET_NULL(tv_caller); /* no incref */
636 }
637 }
638 }
639}
640#endif /* DUK_USE_NONSTD_FUNC_CALLER_PROPERTY */
641
642/*
643 * Determine the effective 'this' binding and coerce the current value
644 * on the valstack to the effective one (in-place, at idx_this).
645 *
646 * The current this value in the valstack (at idx_this) represents either:
647 * - the caller's requested 'this' binding; or
648 * - a 'this' binding accumulated from the bound function chain
649 *
650 * The final 'this' binding for the target function may still be
651 * different, and is determined as described in E5 Section 10.4.3.
652 *
653 * For global and eval code (E5 Sections 10.4.1 and 10.4.2), we assume
654 * that the caller has provided the correct 'this' binding explicitly
655 * when calling, i.e.:
656 *
657 * - global code: this=global object
658 * - direct eval: this=copy from eval() caller's this binding
659 * - other eval: this=global object
660 *
661 * Note: this function may cause a recursive function call with arbitrary
662 * side effects, because ToObject() may be called.
663 */
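/* Illustrative addition (not part of the original source): the coercion
 * described above is visible when a non-strict function is called with a
 * primitive or undefined 'this'. A sketch using duk_call_method(), assuming
 * an initialized 'ctx' (function name is illustrative):
 */
#if 0
static void example_this_coercion(duk_context *ctx) {
	duk_eval_string(ctx, "(function () { return typeof this; })");  /* [ ... func ] */
	duk_push_int(ctx, 42);                                          /* [ ... func this ] */
	duk_call_method(ctx, 0 /*nargs*/);
	/* Non-strict callee: 42 is ToObject() coerced, so the result is "object".
	 * A strict callee would see the primitive and return "number".
	 */
	duk_pop(ctx);
}
#endif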
664
666 duk_hobject *func,
667 duk_idx_t idx_this) {
668 duk_context *ctx = (duk_context *) thr;
669 duk_tval *tv_this;
670 duk_hobject *obj_global;
671
672 if (func == NULL || DUK_HOBJECT_HAS_STRICT(func)) {
673 /* Lightfuncs are always considered strict. */
674 DUK_DDD(DUK_DDDPRINT("this binding: strict -> use directly"));
675 return;
676 }
677
678 /* XXX: byte offset */
679 tv_this = thr->valstack_bottom + idx_this;
680 switch (DUK_TVAL_GET_TAG(tv_this)) {
681 case DUK_TAG_OBJECT:
682 case DUK_TAG_LIGHTFUNC: /* lightfuncs are treated like objects and not coerced */
683 DUK_DDD(DUK_DDDPRINT("this binding: non-strict, object -> use directly"));
684 break;
686 case DUK_TAG_NULL:
687 DUK_DDD(DUK_DDDPRINT("this binding: non-strict, undefined/null -> use global object"));
688 obj_global = thr->builtins[DUK_BIDX_GLOBAL];
689 /* XXX: avoid this check somehow */
690 if (DUK_LIKELY(obj_global != NULL)) {
691 DUK_ASSERT(!DUK_TVAL_IS_HEAP_ALLOCATED(tv_this)); /* no need to decref previous value */
692 DUK_TVAL_SET_OBJECT(tv_this, obj_global);
693 DUK_HOBJECT_INCREF(thr, obj_global);
694 } else {
695 /* This may only happen if built-ins are being "torn down".
696 * This behavior is out of specification scope.
697 */
698 DUK_D(DUK_DPRINT("this binding: wanted to use global object, but it is NULL -> using undefined instead"));
699 DUK_ASSERT(!DUK_TVAL_IS_HEAP_ALLOCATED(tv_this)); /* no need to decref previous value */
700 DUK_TVAL_SET_UNDEFINED(tv_this); /* nothing to incref */
701 }
702 break;
703 default:
705 DUK_DDD(DUK_DDDPRINT("this binding: non-strict, not object/undefined/null -> use ToObject(value)"));
706 duk_to_object(ctx, idx_this); /* may have side effects */
707 break;
708 }
709}
710
711/*
712 * Shared helper for non-bound func lookup.
713 *
714 * Returns duk_hobject * to the final non-bound function (NULL for lightfunc).
715 */
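/* Illustrative addition (not part of the original source): a lightfunc is a
 * plain duk_tval with no backing duk_hobject, which is why the lookup below
 * returns NULL for it. A sketch of pushing and calling one, assuming an
 * initialized 'ctx' (function names are illustrative):
 */
#if 0
static duk_ret_t my_adder(duk_context *ctx) {
	duk_push_int(ctx, duk_get_int(ctx, 0) + duk_get_int(ctx, 1));
	return 1;
}

static void example_lightfunc_call(duk_context *ctx) {
	duk_push_c_lightfunc(ctx, my_adder, 2 /*nargs*/, 2 /*length*/, 0 /*magic*/);
	duk_push_int(ctx, 2);
	duk_push_int(ctx, 3);
	duk_call(ctx, 2);  /* -> 5 */
	duk_pop(ctx);
}
#endif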
716
718 duk_idx_t idx_func,
719 duk_idx_t *out_num_stack_args,
720 duk_tval **out_tv_func,
721 duk_small_uint_t call_flags) {
722 duk_hthread *thr = (duk_hthread *) ctx;
723 duk_tval *tv_func;
724 duk_hobject *func;
725
726 for (;;) {
727 /* Use loop to minimize code size of relookup after bound function case */
728 tv_func = DUK_GET_TVAL_POSIDX(ctx, idx_func);
729 DUK_ASSERT(tv_func != NULL);
730
731 if (DUK_TVAL_IS_OBJECT(tv_func)) {
732 func = DUK_TVAL_GET_OBJECT(tv_func);
733 if (!DUK_HOBJECT_IS_CALLABLE(func)) {
734 goto not_callable_error;
735 }
736 if (DUK_HOBJECT_HAS_BOUND(func)) {
737 duk__handle_bound_chain_for_call(thr, idx_func, out_num_stack_args, call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL);
738
739 /* The final object may be a normal function or a lightfunc.
740 * We need to re-lookup tv_func because it may have changed
741 * (also value stack may have been resized). Loop again to
742 * do that; we're guaranteed not to come here again.
743 */
746 continue;
747 }
748 } else if (DUK_TVAL_IS_LIGHTFUNC(tv_func)) {
749 func = NULL;
750 } else {
751 goto not_callable_error;
752 }
753 break;
754 }
755
757 DUK_TVAL_IS_LIGHTFUNC(tv_func));
758 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func));
761
762 *out_tv_func = tv_func;
763 return func;
764
765 not_callable_error:
766 DUK_ASSERT(tv_func != NULL);
767#if defined(DUK_USE_PARANOID_ERRORS)
769#else
770 DUK_ERROR_FMT1(thr, DUK_ERR_TYPE_ERROR, "%s not callable", duk_push_string_tval_readable(ctx, tv_func));
771#endif
773 return NULL; /* never executed */
774}
775
776/*
777 * Value stack resize and stack top adjustment helper.
778 *
779 * XXX: This should all be merged to duk_valstack_resize_raw().
780 */
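/* Illustrative addition (not part of the original source): the clamping and
 * padding performed below is what a Duktape/C function observes through its
 * declared nargs. A sketch, assuming an initialized 'ctx' (function names are
 * illustrative):
 */
#if 0
static duk_ret_t my_native(duk_context *ctx) {
	/* Registered with nargs=2: missing arguments read as undefined,
	 * extra arguments are clamped away.
	 */
	(void) duk_is_undefined(ctx, 1);
	return 0;
}

static void example_nargs_clamp(duk_context *ctx) {
	duk_push_c_function(ctx, my_native, 2 /*nargs*/);
	duk_push_int(ctx, 1);  /* only one actual argument */
	duk_call(ctx, 1);
	duk_pop(ctx);
}
#endif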
781
783 duk_idx_t num_stack_args,
784 duk_idx_t idx_args,
785 duk_idx_t nregs,
786 duk_idx_t nargs,
787 duk_hobject *func) {
788 duk_context *ctx = (duk_context *) thr;
789 duk_size_t vs_min_size;
790 duk_bool_t adjusted_top = 0;
791
792 vs_min_size = (thr->valstack_bottom - thr->valstack) + /* bottom of current func */
793 idx_args; /* bottom of new func */
794
795 if (nregs >= 0) {
796 DUK_ASSERT(nargs >= 0);
797 DUK_ASSERT(nregs >= nargs);
798 vs_min_size += nregs;
799 } else {
800 /* 'func' wants stack "as is" */
801 vs_min_size += num_stack_args; /* num entries of new func at entry */
802 }
803 if (func == NULL || DUK_HOBJECT_IS_NATIVEFUNCTION(func)) {
804 vs_min_size += DUK_VALSTACK_API_ENTRY_MINIMUM; /* Duktape/C API guaranteed entries (on top of args) */
805 }
806 vs_min_size += DUK_VALSTACK_INTERNAL_EXTRA; /* + spare */
807
808 /* XXX: We can't resize the value stack to a size smaller than the
809 * current top, so the order of the resize and adjusting the stack
810 * top depends on the current vs. final size of the value stack.
811 * The operations could be combined to avoid this, but the proper
812 * fix is to only grow the value stack on a function call, and only
813 * shrink it (without throwing if the shrink fails) on function
814 * return.
815 */
816
817 if (vs_min_size < (duk_size_t) (thr->valstack_top - thr->valstack)) {
818 DUK_DDD(DUK_DDDPRINT("final size smaller, set top before resize"));
819
820 DUK_ASSERT(nregs >= 0); /* can't happen when keeping current stack size */
821 duk_set_top(ctx, idx_args + nargs); /* clamp anything above nargs */
822 duk_set_top(ctx, idx_args + nregs); /* extend with undefined */
823 adjusted_top = 1;
824 }
825
827 vs_min_size,
828 DUK_VSRESIZE_FLAG_SHRINK | /* flags */
829 0 /* no compact */ |
831
832 if (!adjusted_top) {
833 if (nregs >= 0) {
834 DUK_ASSERT(nregs >= nargs);
835 duk_set_top(ctx, idx_args + nargs); /* clamp anything above nargs */
836 duk_set_top(ctx, idx_args + nregs); /* extend with undefined */
837 }
838 }
839}
840
841/*
842 * Manipulate value stack so that exactly 'num_stack_rets' return
843 * values are at 'idx_retbase' in every case, assuming there are
844 * 'rc' return values on top of stack.
845 *
846 * This is a bit tricky, because the called C function operates in
847 * the same activation record and may have e.g. popped the stack
848 * empty (below idx_retbase).
849 */
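/* Illustrative addition (not part of the original source): the adjustment
 * below is what lets duk_safe_call() promise a fixed number of results no
 * matter what the wrapped function pushes or pops. A sketch, assuming an
 * initialized 'ctx' (function names are illustrative):
 */
#if 0
static duk_ret_t raw_helper(duk_context *ctx) {
	duk_push_int(ctx, 123);
	return 1;  /* one actual return value */
}

static void example_safe_call(duk_context *ctx) {
	/* Request two results; the missing one is padded with 'undefined'. */
	if (duk_safe_call(ctx, raw_helper, 0 /*nargs*/, 2 /*nrets*/) == DUK_EXEC_SUCCESS) {
		/* [ ... 123 undefined ] */
	}
	duk_pop_2(ctx);
}
#endif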
850
851DUK_LOCAL void duk__safe_call_adjust_valstack(duk_hthread *thr, duk_idx_t idx_retbase, duk_idx_t num_stack_rets, duk_idx_t num_actual_rets) {
852 duk_context *ctx = (duk_context *) thr;
853 duk_idx_t idx_rcbase;
854
855 DUK_ASSERT(thr != NULL);
856 DUK_ASSERT(idx_retbase >= 0);
857 DUK_ASSERT(num_stack_rets >= 0);
858 DUK_ASSERT(num_actual_rets >= 0);
859
860 idx_rcbase = duk_get_top(ctx) - num_actual_rets; /* base of known return values */
861
862 DUK_DDD(DUK_DDDPRINT("adjust valstack after func call: "
863 "num_stack_rets=%ld, num_actual_rets=%ld, stack_top=%ld, idx_retbase=%ld, idx_rcbase=%ld",
864 (long) num_stack_rets, (long) num_actual_rets, (long) duk_get_top(ctx),
865 (long) idx_retbase, (long) idx_rcbase));
866
867 DUK_ASSERT(idx_rcbase >= 0); /* caller must check */
868
869 /* Ensure space for final configuration (idx_retbase + num_stack_rets)
870 * and intermediate configurations.
871 */
873 (idx_rcbase > idx_retbase ? idx_rcbase : idx_retbase) +
874 num_stack_rets);
875
876 /* Chop extra retvals away / extend with undefined. */
877 duk_set_top(ctx, idx_rcbase + num_stack_rets);
878
879 if (idx_rcbase >= idx_retbase) {
880 duk_idx_t count = idx_rcbase - idx_retbase;
881 duk_idx_t i;
882
883 DUK_DDD(DUK_DDDPRINT("elements at/after idx_retbase have enough to cover func retvals "
884 "(idx_retbase=%ld, idx_rcbase=%ld)", (long) idx_retbase, (long) idx_rcbase));
885
886 /* nuke values at idx_retbase to get the first retval (initially
887 * at idx_rcbase) to idx_retbase
888 */
889
890 DUK_ASSERT(count >= 0);
891
892 for (i = 0; i < count; i++) {
893 /* XXX: inefficient; block remove primitive */
894 duk_remove(ctx, idx_retbase);
895 }
896 } else {
897 duk_idx_t count = idx_retbase - idx_rcbase;
898 duk_idx_t i;
899
900 DUK_DDD(DUK_DDDPRINT("not enough elements at/after idx_retbase to cover func retvals "
901 "(idx_retbase=%ld, idx_rcbase=%ld)", (long) idx_retbase, (long) idx_rcbase));
902
903 /* insert 'undefined' values at idx_rcbase to get the
904 * return values to idx_retbase
905 */
906
907 DUK_ASSERT(count > 0);
908
909 for (i = 0; i < count; i++) {
910 /* XXX: inefficient; block insert primitive */
912 duk_insert(ctx, idx_rcbase);
913 }
914 }
915}
916
917/*
918 * Misc shared helpers.
919 */
920
921/* Get valstack index for the func argument or throw if insane stack. */
923 duk_size_t off_stack_top;
924 duk_size_t off_stack_args;
925 duk_size_t off_stack_all;
926 duk_idx_t idx_func; /* valstack index of 'func' and retval (relative to entry valstack_bottom) */
927
928 /* Argument validation and func/args offset. */
929 off_stack_top = (duk_size_t) ((duk_uint8_t *) thr->valstack_top - (duk_uint8_t *) thr->valstack_bottom);
930 off_stack_args = (duk_size_t) ((duk_size_t) num_stack_args * sizeof(duk_tval));
931 off_stack_all = off_stack_args + 2 * sizeof(duk_tval);
932 if (DUK_UNLIKELY(off_stack_all > off_stack_top)) {
933 /* Since stack indices are not reliable, we can't do anything useful
934 * here. Invoke the existing setjmp catcher, or if it doesn't exist,
935 * call the fatal error handler.
936 */
938 return 0;
939 }
940 idx_func = (duk_idx_t) ((off_stack_top - off_stack_all) / sizeof(duk_tval));
941 return idx_func;
942}
943
944/*
945 * duk_handle_call_protected() and duk_handle_call_unprotected():
946 * call into a Duktape/C or an Ecmascript function from any state.
947 *
948 * Input stack (thr):
949 *
950 * [ func this arg1 ... argN ]
951 *
952 * Output stack (thr):
953 *
954 * [ retval ] (DUK_EXEC_SUCCESS)
955 * [ errobj ] (DUK_EXEC_ERROR (normal error), protected call)
956 *
957 * Even when executing a protected call an error may be thrown in rare cases
958 * such as an insane num_stack_args argument. If there is no catchpoint for
959 * such errors, the fatal error handler is called.
960 *
961 * The error handling path should be error free, even for out-of-memory
962 * errors, to ensure safe sandboxing. (As of Duktape 1.4.0 this is not
963 * yet the case, see XXX notes below.)
964 */
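/* Illustrative addition (not part of the original source): the public
 * duk_pcall_method() uses this same [ func this arg1 ... argN ] input shape
 * and the DUK_EXEC_SUCCESS/DUK_EXEC_ERROR result convention. A sketch,
 * assuming an initialized 'ctx' (function name is illustrative):
 */
#if 0
static void example_pcall_method(duk_context *ctx) {
	duk_eval_string(ctx, "(function (msg) { throw new Error(msg); })");  /* [ ... func ] */
	duk_push_null(ctx);                                                  /* [ ... func this ] */
	duk_push_string(ctx, "boom");                                        /* [ ... func this arg ] */
	if (duk_pcall_method(ctx, 1 /*nargs*/) != DUK_EXEC_SUCCESS) {
		/* [ ... errobj ]; e.g. duk_safe_to_string(ctx, -1) gives a message */
	}
	duk_pop(ctx);
}
#endif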
965
967 duk_idx_t num_stack_args,
968 duk_small_uint_t call_flags) {
969 duk_context *ctx;
970 duk_size_t entry_valstack_bottom_index;
971 duk_size_t entry_valstack_end;
972 duk_size_t entry_callstack_top;
973 duk_size_t entry_catchstack_top;
974 duk_int_t entry_call_recursion_depth;
975 duk_hthread *entry_curr_thread;
976 duk_uint_fast8_t entry_thread_state;
977 duk_instr_t **entry_ptr_curr_pc;
978 duk_jmpbuf *old_jmpbuf_ptr = NULL;
979 duk_jmpbuf our_jmpbuf;
980 duk_idx_t idx_func; /* valstack index of 'func' and retval (relative to entry valstack_bottom) */
981
982 /* XXX: Multiple tv_func lookups are now avoided by making a local
983 * copy of tv_func. Another approach would be to compute an offset
984 * for tv_func from valstack bottom and recomputing the tv_func
985 * pointer quickly as valstack + offset instead of calling duk_get_tval().
986 */
987
988 ctx = (duk_context *) thr;
989 DUK_UNREF(ctx);
990 DUK_ASSERT(thr != NULL);
992 DUK_ASSERT(num_stack_args >= 0);
993 /* XXX: currently NULL allocations are not supported; remove if later allowed */
994 DUK_ASSERT(thr->valstack != NULL);
995 DUK_ASSERT(thr->callstack != NULL);
996 DUK_ASSERT(thr->catchstack != NULL);
997
998 /* Argument validation and func/args offset. */
999 idx_func = duk__get_idx_func(thr, num_stack_args);
1000
1001 /* Preliminaries, required by setjmp() handler. Must be careful not
1002 * to throw an unintended error here.
1003 */
1004
1005 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
1006#if defined(DUK_USE_PREFER_SIZE)
1007 entry_valstack_end = (duk_size_t) (thr->valstack_end - thr->valstack);
1008#else
1009 DUK_ASSERT((duk_size_t) (thr->valstack_end - thr->valstack) == thr->valstack_size);
1010 entry_valstack_end = thr->valstack_size;
1011#endif
1012 entry_callstack_top = thr->callstack_top;
1013 entry_catchstack_top = thr->catchstack_top;
1014 entry_call_recursion_depth = thr->heap->call_recursion_depth;
1015 entry_curr_thread = thr->heap->curr_thread; /* Note: may be NULL if first call */
1016 entry_thread_state = thr->state;
1017 entry_ptr_curr_pc = thr->ptr_curr_pc; /* may be NULL */
1018
1019 DUK_DD(DUK_DDPRINT("duk_handle_call_protected: thr=%p, num_stack_args=%ld, "
1020 "call_flags=0x%08lx (ignorerec=%ld, constructor=%ld), "
1021 "valstack_top=%ld, idx_func=%ld, idx_args=%ld, rec_depth=%ld/%ld, "
1022 "entry_valstack_bottom_index=%ld, entry_callstack_top=%ld, entry_catchstack_top=%ld, "
1023 "entry_call_recursion_depth=%ld, entry_curr_thread=%p, entry_thread_state=%ld",
1024 (void *) thr,
1025 (long) num_stack_args,
1026 (unsigned long) call_flags,
1027 (long) ((call_flags & DUK_CALL_FLAG_IGNORE_RECLIMIT) != 0 ? 1 : 0),
1028 (long) ((call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL) != 0 ? 1 : 0),
1029 (long) duk_get_top(ctx),
1030 (long) idx_func,
1031 (long) (idx_func + 2),
1032 (long) thr->heap->call_recursion_depth,
1033 (long) thr->heap->call_recursion_limit,
1034 (long) entry_valstack_bottom_index,
1035 (long) entry_callstack_top,
1036 (long) entry_catchstack_top,
1037 (long) entry_call_recursion_depth,
1038 (void *) entry_curr_thread,
1039 (long) entry_thread_state));
1040
1041 old_jmpbuf_ptr = thr->heap->lj.jmpbuf_ptr;
1042 thr->heap->lj.jmpbuf_ptr = &our_jmpbuf;
1043
1044#if defined(DUK_USE_CPP_EXCEPTIONS)
1045 try {
1046#else
1047 DUK_ASSERT(thr->heap->lj.jmpbuf_ptr == &our_jmpbuf);
1048 if (DUK_SETJMP(our_jmpbuf.jb) == 0) {
1049#endif
1050 /* Call handling and success path. Success path exit cleans
1051 * up almost all state.
1052 */
1053 duk__handle_call_inner(thr, num_stack_args, call_flags, idx_func);
1054
1055 /* Success path handles */
1056 DUK_ASSERT(thr->heap->call_recursion_depth == entry_call_recursion_depth);
1057 DUK_ASSERT(thr->ptr_curr_pc == entry_ptr_curr_pc);
1058
1059 /* Longjmp state is kept clean in success path */
1061 DUK_ASSERT(thr->heap->lj.iserror == 0);
1064
1065 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
1066
1067 return DUK_EXEC_SUCCESS;
1068#if defined(DUK_USE_CPP_EXCEPTIONS)
1069 } catch (duk_internal_exception &exc) {
1070#else
1071 } else {
1072#endif
1073 /* Error; error value is in heap->lj.value1. */
1074
1075#if defined(DUK_USE_CPP_EXCEPTIONS)
1076 DUK_UNREF(exc);
1077#endif
1078
1080 entry_valstack_bottom_index,
1081 entry_valstack_end,
1082 entry_catchstack_top,
1083 entry_callstack_top,
1084 entry_call_recursion_depth,
1085 entry_curr_thread,
1086 entry_thread_state,
1087 entry_ptr_curr_pc,
1088 idx_func,
1089 old_jmpbuf_ptr);
1090
1091 /* Longjmp state is cleaned up by error handling */
1093 DUK_ASSERT(thr->heap->lj.iserror == 0);
1096 return DUK_EXEC_ERROR;
1097 }
1098#if defined(DUK_USE_CPP_EXCEPTIONS)
1099 catch (std::exception &exc) {
1100 const char *what = exc.what();
1101 if (!what) {
1102 what = "unknown";
1103 }
1104 DUK_D(DUK_DPRINT("unexpected c++ std::exception (perhaps thrown by user code)"));
1105 try {
1106 DUK_ERROR_FMT1(thr, DUK_ERR_API_ERROR, "caught invalid c++ std::exception '%s' (perhaps thrown by user code)", what);
1107 } catch (duk_internal_exception exc) {
1108 DUK_D(DUK_DPRINT("caught api error thrown from unexpected c++ std::exception"));
1109 DUK_UNREF(exc);
1111 entry_valstack_bottom_index,
1112 entry_valstack_end,
1113 entry_catchstack_top,
1114 entry_callstack_top,
1115 entry_call_recursion_depth,
1116 entry_curr_thread,
1117 entry_thread_state,
1118 entry_ptr_curr_pc,
1119 idx_func,
1120 old_jmpbuf_ptr);
1121 return DUK_EXEC_ERROR;
1122 }
1123 } catch (...) {
1124 DUK_D(DUK_DPRINT("unexpected c++ exception (perhaps thrown by user code)"));
1125 try {
1126 DUK_ERROR_API(thr, "caught invalid c++ exception (perhaps thrown by user code)");
1127 } catch (duk_internal_exception exc) {
1128 DUK_D(DUK_DPRINT("caught api error thrown from unexpected c++ exception"));
1129 DUK_UNREF(exc);
1131 entry_valstack_bottom_index,
1132 entry_valstack_end,
1133 entry_catchstack_top,
1134 entry_callstack_top,
1135 entry_call_recursion_depth,
1136 entry_curr_thread,
1137 entry_thread_state,
1138 entry_ptr_curr_pc,
1139 idx_func,
1140 old_jmpbuf_ptr);
1141 return DUK_EXEC_ERROR;
1142 }
1143 }
1144#endif
1145}
1146
1148 duk_idx_t num_stack_args,
1149 duk_small_uint_t call_flags) {
1150 duk_idx_t idx_func; /* valstack index of 'func' and retval (relative to entry valstack_bottom) */
1151
1152 /* Argument validation and func/args offset. */
1153 idx_func = duk__get_idx_func(thr, num_stack_args);
1154
1155 duk__handle_call_inner(thr, num_stack_args, call_flags, idx_func);
1156}
1157
1159 duk_idx_t num_stack_args,
1160 duk_small_uint_t call_flags,
1161 duk_idx_t idx_func) {
1162 duk_context *ctx;
1163 duk_size_t entry_valstack_bottom_index;
1164 duk_size_t entry_valstack_end;
1165 duk_size_t entry_callstack_top;
1166 duk_size_t entry_catchstack_top;
1167 duk_int_t entry_call_recursion_depth;
1168 duk_hthread *entry_curr_thread;
1169 duk_uint_fast8_t entry_thread_state;
1170 duk_instr_t **entry_ptr_curr_pc;
1171 duk_idx_t nargs; /* # argument registers target function wants (< 0 => "as is") */
1172 duk_idx_t nregs; /* # total registers target function wants on entry (< 0 => "as is") */
1173 duk_hobject *func; /* 'func' on stack (borrowed reference) */
1174 duk_tval *tv_func; /* duk_tval ptr for 'func' on stack (borrowed reference) or tv_func_copy */
1175 duk_tval tv_func_copy; /* to avoid relookups */
1176 duk_activation *act;
1177 duk_hobject *env;
1178 duk_ret_t rc;
1179
1180 ctx = (duk_context *) thr;
1181 DUK_ASSERT(thr != NULL);
1183 DUK_ASSERT(ctx != NULL);
1184 DUK_ASSERT(num_stack_args >= 0);
1185 /* XXX: currently NULL allocations are not supported; remove if later allowed */
1186 DUK_ASSERT(thr->valstack != NULL);
1187 DUK_ASSERT(thr->callstack != NULL);
1188 DUK_ASSERT(thr->catchstack != NULL);
1189
1190 DUK_DD(DUK_DDPRINT("duk__handle_call_inner: num_stack_args=%ld, call_flags=0x%08lx, top=%ld",
1191 (long) num_stack_args, (long) call_flags, (long) duk_get_top(ctx)));
1192
1193 /*
1194 * Store entry state.
1195 */
1196
1197 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
1198#if defined(DUK_USE_PREFER_SIZE)
1199 entry_valstack_end = (duk_size_t) (thr->valstack_end - thr->valstack);
1200#else
1201 DUK_ASSERT((duk_size_t) (thr->valstack_end - thr->valstack) == thr->valstack_size);
1202 entry_valstack_end = thr->valstack_size;
1203#endif
1204 entry_callstack_top = thr->callstack_top;
1205 entry_catchstack_top = thr->catchstack_top;
1206 entry_call_recursion_depth = thr->heap->call_recursion_depth;
1207 entry_curr_thread = thr->heap->curr_thread; /* Note: may be NULL if first call */
1208 entry_thread_state = thr->state;
1209 entry_ptr_curr_pc = thr->ptr_curr_pc; /* may be NULL */
1210
1211 /* If thr->ptr_curr_pc is set, sync curr_pc to act->pc. Then NULL
1212 * thr->ptr_curr_pc so that it's not accidentally used with an incorrect
1213 * activation when side effects occur.
1214 */
1216
1217 DUK_DD(DUK_DDPRINT("duk__handle_call_inner: thr=%p, num_stack_args=%ld, "
1218 "call_flags=0x%08lx (ignorerec=%ld, constructor=%ld), "
1219 "valstack_top=%ld, idx_func=%ld, idx_args=%ld, rec_depth=%ld/%ld, "
1220 "entry_valstack_bottom_index=%ld, entry_callstack_top=%ld, entry_catchstack_top=%ld, "
1221 "entry_call_recursion_depth=%ld, entry_curr_thread=%p, entry_thread_state=%ld",
1222 (void *) thr,
1223 (long) num_stack_args,
1224 (unsigned long) call_flags,
1225 (long) ((call_flags & DUK_CALL_FLAG_IGNORE_RECLIMIT) != 0 ? 1 : 0),
1226 (long) ((call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL) != 0 ? 1 : 0),
1227 (long) duk_get_top(ctx),
1228 (long) idx_func,
1229 (long) (idx_func + 2),
1230 (long) thr->heap->call_recursion_depth,
1231 (long) thr->heap->call_recursion_limit,
1232 (long) entry_valstack_bottom_index,
1233 (long) entry_callstack_top,
1234 (long) entry_catchstack_top,
1235 (long) entry_call_recursion_depth,
1236 (void *) entry_curr_thread,
1237 (long) entry_thread_state));
1238
1239
1240 /*
1241 * Thread state check and book-keeping.
1242 */
1243
1244 if (thr == thr->heap->curr_thread) {
1245 /* same thread */
1246 if (thr->state != DUK_HTHREAD_STATE_RUNNING) {
1247 /* should actually never happen, but check anyway */
1248 goto thread_state_error;
1249 }
1250 } else {
1251 /* different thread */
1252 DUK_ASSERT(thr->heap->curr_thread == NULL ||
1254 if (thr->state != DUK_HTHREAD_STATE_INACTIVE) {
1255 goto thread_state_error;
1256 }
1257 DUK_HEAP_SWITCH_THREAD(thr->heap, thr);
1259
1260 /* Note: multiple threads may be simultaneously in the RUNNING
1261 * state, but not in the same "resume chain".
1262 */
1263 }
1264 DUK_ASSERT(thr->heap->curr_thread == thr);
1266
1267 /*
1268 * C call recursion depth check, which provides a reasonable upper
1269 * bound on maximum C stack size (arbitrary C stack growth is only
1270 * possible by recursive handle_call / handle_safe_call calls).
1271 */
1272
1273 /* XXX: remove DUK_CALL_FLAG_IGNORE_RECLIMIT flag: there's now the
1274 * reclimit bump?
1275 */
1276
1279 if (call_flags & DUK_CALL_FLAG_IGNORE_RECLIMIT) {
1280 DUK_DD(DUK_DDPRINT("ignoring reclimit for this call (probably an errhandler call)"));
1281 } else {
1283 /* XXX: error message is a bit misleading: we reached a recursion
1284 * limit which is also essentially the same as a C callstack limit
1285 * (except perhaps with some relaxed threading assumptions).
1286 */
1288 }
1289 thr->heap->call_recursion_depth++;
1290 }
1291
1292 /*
1293 * Check the function type, handle bound function chains, and prepare
1294 * parameters for the rest of the call handling. Also figure out the
1295 * effective 'this' binding, which replaces the current value at
1296 * idx_func + 1.
1297 *
1298 * If the target function is a 'bound' one, follow the chain of 'bound'
1299 * functions until a non-bound function is found. During this process,
1300 * bound arguments are 'prepended' to existing ones, and the "this"
1301 * binding is overridden. See E5 Section 15.3.4.5.1.
1302 *
1303 * Lightfunc detection happens here too. Note that lightweight functions
1304 * can be wrapped by (non-lightweight) bound functions so we must resolve
1305 * the bound function chain first.
1306 */
1307
1308 func = duk__nonbound_func_lookup(ctx, idx_func, &num_stack_args, &tv_func, call_flags);
1309 DUK_TVAL_SET_TVAL(&tv_func_copy, tv_func);
1310 tv_func = &tv_func_copy; /* local copy to avoid relookups */
1311
1312 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func));
1315
1316 duk__coerce_effective_this_binding(thr, func, idx_func + 1);
1317 DUK_DDD(DUK_DDDPRINT("effective 'this' binding is: %!T",
1318 (duk_tval *) duk_get_tval(ctx, idx_func + 1)));
1319
1320 /* [ ... func this arg1 ... argN ] */
1321
1322 /*
1323 * Setup a preliminary activation and figure out nargs/nregs.
1324 *
1325 * Don't touch valstack_bottom or valstack_top yet so that Duktape API
1326 * calls work normally.
1327 */
1328
1330
1331 if (thr->callstack_top > 0) {
1332 /*
1333 * Update idx_retval of current activation.
1334 *
1335 * Although it might seem this is not necessary (bytecode executor
1336 * does this for Ecmascript-to-Ecmascript calls; other calls are
1337 * handled here), this turns out to be necessary for handling yield
1338 * and resume. For them, an Ecmascript-to-native call happens, and
1339 * the Ecmascript call's idx_retval must be set for things to work.
1340 */
1341
1342 (thr->callstack + thr->callstack_top - 1)->idx_retval = entry_valstack_bottom_index + idx_func;
1343 }
1344
1346 act = thr->callstack + thr->callstack_top;
1347 thr->callstack_top++;
1349 DUK_ASSERT(thr->valstack_top > thr->valstack_bottom); /* at least effective 'this' */
1350 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func));
1351
1352 act->flags = 0;
1353
1354 /* For now all calls except Ecma-to-Ecma calls prevent a yield. */
1356 if (call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL) {
1358 }
1359 if (call_flags & DUK_CALL_FLAG_DIRECT_EVAL) {
1361 }
1362
1363 /* These base values are never used, but if the compiler doesn't know
1364 * that DUK_ERROR() won't return, these are needed to silence warnings.
1365 * On the other hand, scan-build will warn about the values not being
1366 * used, so add a DUK_UNREF.
1367 */
1368 nargs = 0; DUK_UNREF(nargs);
1369 nregs = 0; DUK_UNREF(nregs);
1370
1371 if (DUK_LIKELY(func != NULL)) {
1372 if (DUK_HOBJECT_HAS_STRICT(func)) {
1373 act->flags |= DUK_ACT_FLAG_STRICT;
1374 }
1376 nargs = ((duk_hcompiledfunction *) func)->nargs;
1377 nregs = ((duk_hcompiledfunction *) func)->nregs;
1378 DUK_ASSERT(nregs >= nargs);
1379 } else if (DUK_HOBJECT_IS_NATIVEFUNCTION(func)) {
1380 /* Note: nargs (and nregs) may be negative for a native
1381 * function, which indicates that the function wants the
1382 * input stack "as is" (i.e. handles "vararg" arguments).
1383 */
1384 nargs = ((duk_hnativefunction *) func)->nargs;
1385 nregs = nargs;
1386 } else {
1387 /* XXX: this should be an assert */
1389 }
1390 } else {
1391 duk_small_uint_t lf_flags;
1392
1394 lf_flags = DUK_TVAL_GET_LIGHTFUNC_FLAGS(tv_func);
1395 nargs = DUK_LFUNC_FLAGS_GET_NARGS(lf_flags);
1396 if (nargs == DUK_LFUNC_NARGS_VARARGS) {
1397 nargs = -1; /* vararg */
1398 }
1399 nregs = nargs;
1400
1401 act->flags |= DUK_ACT_FLAG_STRICT;
1402 }
1403
1404 act->func = func; /* NULL for lightfunc */
1405 act->var_env = NULL;
1406 act->lex_env = NULL;
1407#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
1408 act->prev_caller = NULL;
1409#endif
1410 act->curr_pc = NULL;
1411#if defined(DUK_USE_DEBUGGER_SUPPORT)
1412 act->prev_line = 0;
1413#endif
1414 act->idx_bottom = entry_valstack_bottom_index + idx_func + 2;
1415#if 0 /* topmost activation idx_retval is considered garbage, no need to init */
1416 act->idx_retval = 0;
1417#endif
1418 DUK_TVAL_SET_TVAL(&act->tv_func, tv_func); /* borrowed, no refcount */
1419
1420 /* XXX: remove the preventcount and make yield walk the callstack?
1421 * Or perhaps just use a single flag, not a counter, faster to just
1422 * set and restore?
1423 */
1424 if (act->flags & DUK_ACT_FLAG_PREVENT_YIELD) {
1425 /* duk_hthread_callstack_unwind() will decrease this on unwind */
1427 }
1428
1429 /* XXX: Is this INCREF necessary? 'func' is always a borrowed
1430 * reference reachable through the value stack? If changed, stack
1431 * unwind code also needs to be fixed to match.
1432 */
1433 DUK_HOBJECT_INCREF_ALLOWNULL(thr, func); /* act->func */
1434
1435#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
1436 if (func) {
1437 duk__update_func_caller_prop(thr, func);
1438 }
1439 act = thr->callstack + thr->callstack_top - 1;
1440#endif
1441
1442 /* [ ... func this arg1 ... argN ] */
1443
1444 /*
1445 * Environment record creation and 'arguments' object creation.
1446 * Named function expression name binding is handled by the
1447 * compiler; the compiled function's parent env will contain
1448 * the (immutable) binding already.
1449 *
1450 * This handling is now identical for C and Ecmascript functions.
1451 * C functions always have the 'NEWENV' flag set, so their
1452 * environment record initialization is delayed (which is good).
1453 *
1454 * Delayed creation (on demand) is handled in duk_js_var.c.
1455 */
1456
1457 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func)); /* bound function chain has already been resolved */
1458
1459 if (DUK_LIKELY(func != NULL)) {
1462 /* Use a new environment but there's no 'arguments' object;
1463 * delayed environment initialization. This is the most
1464 * common case.
1465 */
1466 DUK_ASSERT(act->lex_env == NULL);
1467 DUK_ASSERT(act->var_env == NULL);
1468 } else {
1469 /* Use a new environment and there's an 'arguments' object.
1470 * We need to initialize it right now.
1471 */
1472
1473 /* third arg: absolute index (to entire valstack) of idx_bottom of new activation */
1475 DUK_ASSERT(env != NULL);
1476
1477 /* [ ... func this arg1 ... argN envobj ] */
1478
1480 duk__handle_createargs_for_call(thr, func, env, num_stack_args);
1481
1482 /* [ ... func this arg1 ... argN envobj ] */
1483
1484 act = thr->callstack + thr->callstack_top - 1;
1485 act->lex_env = env;
1486 act->var_env = env;
1487 DUK_HOBJECT_INCREF(thr, env);
1488 DUK_HOBJECT_INCREF(thr, env); /* XXX: incref by count (2) directly */
1489 duk_pop(ctx);
1490 }
1491 } else {
1492 /* Use existing env (e.g. for non-strict eval); cannot have
1493 * an own 'arguments' object (but can refer to an existing one).
1494 */
1495
1497
1498 duk__handle_oldenv_for_call(thr, func, act);
1499
1500 DUK_ASSERT(act->lex_env != NULL);
1501 DUK_ASSERT(act->var_env != NULL);
1502 }
1503 } else {
1504 /* Lightfuncs are always native functions and have "newenv". */
1505 DUK_ASSERT(act->lex_env == NULL);
1506 DUK_ASSERT(act->var_env == NULL);
1507 }
1508
1509 /* [ ... func this arg1 ... argN ] */
1510
1511 /*
1512 * Setup value stack: clamp to 'nargs', fill up to 'nregs'
1513 *
1514 * Value stack may either grow or shrink, depending on the
1515 * number of func registers and the number of actual arguments.
1516 * If nregs >= 0, func wants args clamped to 'nargs'; else it
1517 * wants all args (= 'num_stack_args').
1518 */
1519
1520 /* XXX: optimize value stack operation */
1521 /* XXX: don't want to shrink allocation here */
1522
1524 num_stack_args,
1525 idx_func + 2,
1526 nregs,
1527 nargs,
1528 func);
1529
1530 /*
1531 * Determine call type, then finalize activation, shift to
1532 * new value stack bottom, and call the target.
1533 */
1534
1535 if (func != NULL && DUK_HOBJECT_IS_COMPILEDFUNCTION(func)) {
1536 /*
1537 * Ecmascript call
1538 */
1539
1540 duk_tval *tv_ret;
1541 duk_tval *tv_funret;
1542
1543 DUK_ASSERT(func != NULL);
1546
1547 thr->valstack_bottom = thr->valstack_bottom + idx_func + 2;
1548 /* keep current valstack_top */
1549 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1551 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1552
1553 /* [ ... func this | arg1 ... argN ] ('this' must precede new bottom) */
1554
1555 /*
1556 * Bytecode executor call.
1557 *
1558 * Execute bytecode, handling any recursive function calls and
1559 * thread resumptions. Returns when execution would return from
1560 * the entry level activation. When the executor returns, a
1561 * single return value is left on the stack top.
1562 *
1563 * The only possible longjmp() is an error (DUK_LJ_TYPE_THROW),
1564 * other types are handled internally by the executor.
1565 */
1566
1567 /* thr->ptr_curr_pc is set by bytecode executor early on entry */
1568 DUK_ASSERT(thr->ptr_curr_pc == NULL);
1569 DUK_DDD(DUK_DDDPRINT("entering bytecode execution"));
1571 DUK_DDD(DUK_DDDPRINT("returned from bytecode execution"));
1572
1573 /* Unwind. */
1574
1575 DUK_ASSERT(thr->catchstack_top >= entry_catchstack_top); /* may need unwind */
1576 DUK_ASSERT(thr->callstack_top == entry_callstack_top + 1);
1577 DUK_ASSERT(thr->callstack_top == entry_callstack_top + 1);
1578 duk_hthread_catchstack_unwind(thr, entry_catchstack_top);
1580 duk_hthread_callstack_unwind(thr, entry_callstack_top);
1582
1583 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
1584 /* keep current valstack_top */
1585 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1587 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1588 DUK_ASSERT(thr->valstack_top - thr->valstack_bottom >= idx_func + 1);
1589
1590 /* Return value handling. */
1591
1592 /* [ ... func this (crud) retval ] */
1593
1594 tv_ret = thr->valstack_bottom + idx_func;
1595 tv_funret = thr->valstack_top - 1;
1596#if defined(DUK_USE_FASTINT)
1597 /* Explicit check for fastint downgrade. */
1598 DUK_TVAL_CHKFAST_INPLACE(tv_funret);
1599#endif
1600 DUK_TVAL_SET_TVAL_UPDREF(thr, tv_ret, tv_funret); /* side effects */
1601 } else {
1602 /*
1603 * Native call.
1604 */
1605
1606 duk_tval *tv_ret;
1607 duk_tval *tv_funret;
1608
1609 thr->valstack_bottom = thr->valstack_bottom + idx_func + 2;
1610 /* keep current valstack_top */
1611 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1613 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1614 DUK_ASSERT(func == NULL || ((duk_hnativefunction *) func)->func != NULL);
1615
1616 /* [ ... func this | arg1 ... argN ] ('this' must precede new bottom) */
1617
1618 /* For native calls ptr_curr_pc must be NULL so we don't sync back */
1619 DUK_ASSERT(thr->ptr_curr_pc == NULL);
1620
1621 if (func) {
1622 rc = ((duk_hnativefunction *) func)->func((duk_context *) thr);
1623 } else {
1625 rc = funcptr((duk_context *) thr);
1626 }
1627
1628 /* Automatic error throwing, retval check. */
1629
1630 if (rc < 0) {
1633 } else if (rc > 1) {
1634 DUK_ERROR_API(thr, "c function returned invalid rc");
1635 }
1636 DUK_ASSERT(rc == 0 || rc == 1);
1637
1638 /* Unwind. */
1639
1640 DUK_ASSERT(thr->catchstack_top == entry_catchstack_top); /* no need to unwind */
1641 DUK_ASSERT(thr->callstack_top == entry_callstack_top + 1);
1642 duk_hthread_callstack_unwind(thr, entry_callstack_top);
1644
1645 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
1646 /* keep current valstack_top */
1647 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1649 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1650 DUK_ASSERT(thr->valstack_top - thr->valstack_bottom >= idx_func + 1);
1651
1652 /* Return value handling. */
1653
1654 /* XXX: should this happen in the callee's activation or after unwinding? */
1655 tv_ret = thr->valstack_bottom + idx_func;
1656 if (rc == 0) {
1657 DUK_TVAL_SET_UNDEFINED_UPDREF(thr, tv_ret); /* side effects */
1658 } else {
1659 /* [ ... func this (crud) retval ] */
1660 tv_funret = thr->valstack_top - 1;
1661#if defined(DUK_USE_FASTINT)
1662 /* Explicit check for fastint downgrade. */
1663 DUK_TVAL_CHKFAST_INPLACE(tv_funret);
1664#endif
1665 DUK_TVAL_SET_TVAL_UPDREF(thr, tv_ret, tv_funret); /* side effects */
1666 }
1667 }
1668
1669 duk_set_top(ctx, idx_func + 1); /* XXX: unnecessary, handle in adjust */
1670
1671 /* [ ... retval ] */
1672
1673 /* Ensure there is internal valstack spare before we exit; this may
1674 * throw an alloc error. The same guaranteed size must be available
1675 * as before the call. This is not optimal now: we store the valstack
1676 * allocated size during entry; this value may be higher than the
1677 * minimal guarantee for an application.
1678 */
1679
1680 /* XXX: we should never shrink here; when we error out later, we'd
1681 * need to potentially grow the value stack in error unwind which could
1682 * cause another error.
1683 */
1684
1685 (void) duk_valstack_resize_raw((duk_context *) thr,
1686 entry_valstack_end, /* same as during entry */
1687 DUK_VSRESIZE_FLAG_SHRINK | /* flags */
1690
1691 /* Restore entry thread executor curr_pc stack frame pointer. */
1692 thr->ptr_curr_pc = entry_ptr_curr_pc;
1693
1694 DUK_HEAP_SWITCH_THREAD(thr->heap, entry_curr_thread); /* may be NULL */
1695 thr->state = (duk_uint8_t) entry_thread_state;
1696
1697 DUK_ASSERT((thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread == NULL) || /* first call */
1698 (thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread != NULL) || /* other call */
1699 (thr->state == DUK_HTHREAD_STATE_RUNNING && thr->heap->curr_thread == thr)); /* current thread */
1700
1701 thr->heap->call_recursion_depth = entry_call_recursion_depth;
1702
1703 /* If the debugger is active we need to force an interrupt so that
1704 * debugger breakpoints are rechecked. This is important for function
1705 * calls caused by side effects (e.g. when doing a DUK_OP_GETPROP), see
1706 * GH-303. Only needed for success path, error path always causes a
1707 * breakpoint recheck in the executor. It would be enough to set this
1708 * only when returning to an Ecmascript activation, but setting the flag
1709 * on every return should have no ill effect.
1710 */
1711#if defined(DUK_USE_DEBUGGER_SUPPORT)
1712 if (DUK_HEAP_IS_DEBUGGER_ATTACHED(thr->heap)) {
1713 DUK_DD(DUK_DDPRINT("returning with debugger enabled, force interrupt"));
1714 DUK_ASSERT(thr->interrupt_counter <= thr->interrupt_init);
1715 thr->interrupt_init -= thr->interrupt_counter;
1716 thr->interrupt_counter = 0;
1717 thr->heap->dbg_force_restart = 1;
1718 }
1719#endif
1720
1721#if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
1722 duk__interrupt_fixup(thr, entry_curr_thread);
1723#endif
1724
1725 return;
1726
1727 thread_state_error:
1728 DUK_ERROR_FMT1(thr, DUK_ERR_TYPE_ERROR, "invalid thread state for call (%ld)", (long) thr->state);
1730 return; /* never executed */
1731}
1732
1734 duk_size_t entry_valstack_bottom_index,
1735 duk_size_t entry_valstack_end,
1736 duk_size_t entry_catchstack_top,
1737 duk_size_t entry_callstack_top,
1738 duk_int_t entry_call_recursion_depth,
1739 duk_hthread *entry_curr_thread,
1740 duk_uint_fast8_t entry_thread_state,
1741 duk_instr_t **entry_ptr_curr_pc,
1742 duk_idx_t idx_func,
1743 duk_jmpbuf *old_jmpbuf_ptr) {
1744 duk_context *ctx;
1745 duk_tval *tv_ret;
1746
1747 ctx = (duk_context *) thr;
1748
1749 DUK_DDD(DUK_DDDPRINT("error caught during duk__handle_call_inner(): %!T",
1750 (duk_tval *) &thr->heap->lj.value1));
1751
1752 /* Other longjmp types are handled by executor before propagating
1753 * the error here.
1754 */
1756 DUK_ASSERT(thr->callstack_top >= entry_callstack_top);
1757 DUK_ASSERT(thr->catchstack_top >= entry_catchstack_top);
1758
1759 /* We don't need to sync back thr->ptr_curr_pc here because
1760 * the bytecode executor always has a setjmp catchpoint which
1761 * does that before errors propagate to here.
1762 */
1763 DUK_ASSERT(thr->ptr_curr_pc == NULL);
1764
1765 /* Restore the previous setjmp catcher so that any error in
1766 * error handling will propagate outwards rather than re-enter
1767 * the same handler. However, the error handling path must be
1768 * designed to be error free so that sandboxing guarantees are
1769 * reliable, see e.g. https://github.com/svaarala/duktape/issues/476.
1770 */
1771 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
1772
1773 /* XXX: callstack unwind may now throw an error when closing
1774 * scopes; this is a sandboxing issue, described in:
1775 * https://github.com/svaarala/duktape/issues/476
1776 */
1777 duk_hthread_catchstack_unwind(thr, entry_catchstack_top);
1779 duk_hthread_callstack_unwind(thr, entry_callstack_top);
1781
1782 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
1783 tv_ret = thr->valstack_bottom + idx_func; /* XXX: byte offset? */
1784 DUK_TVAL_SET_TVAL_UPDREF(thr, tv_ret, &thr->heap->lj.value1); /* side effects */
1785#if defined(DUK_USE_FASTINT)
1786 /* Explicit check for fastint downgrade. */
1787 DUK_TVAL_CHKFAST_INPLACE(tv_ret);
1788#endif
1789 duk_set_top(ctx, idx_func + 1); /* XXX: could be eliminated with valstack adjust */
1790
1791 /* [ ... errobj ] */
1792
1793 /* Ensure there is internal valstack spare before we exit; this may
1794 * throw an alloc error. The same guaranteed size must be available
1795 * as before the call. This is not optimal now: we store the valstack
1796 * allocated size during entry; this value may be higher than the
1797 * minimal guarantee for an application.
1798 */
1799
1800 /* XXX: this needs to be reworked so that we never shrink the value
1801 * stack on function entry so that we never need to grow it here.
1802 * Needing to grow here is a sandboxing issue because we need to
1803 * allocate which may cause an error in the error handling path
1804 * and thus propagate an error out of a protected call.
1805 */
1806
1807 (void) duk_valstack_resize_raw((duk_context *) thr,
1808 entry_valstack_end, /* same as during entry */
1809 DUK_VSRESIZE_FLAG_SHRINK | /* flags */
1810 DUK_VSRESIZE_FLAG_COMPACT |
1811 DUK_VSRESIZE_FLAG_THROW);
1812
1813
1814 /* These are just convenience "wiping" of state. Side effects should
1815 * not be an issue here: thr->heap and thr->heap->lj have a stable
1816 * pointer. Finalizer runs etc capture even out-of-memory errors so
1817 * nothing should throw here.
1818 */
1820 thr->heap->lj.iserror = 0;
1821 DUK_TVAL_SET_UNDEFINED_UPDREF(thr, &thr->heap->lj.value1); /* side effects */
1822 DUK_TVAL_SET_UNDEFINED_UPDREF(thr, &thr->heap->lj.value2); /* side effects */
1823
1824 /* Restore entry thread executor curr_pc stack frame pointer. */
1825 thr->ptr_curr_pc = entry_ptr_curr_pc;
1826
1827 DUK_HEAP_SWITCH_THREAD(thr->heap, entry_curr_thread); /* may be NULL */
1828 thr->state = (duk_uint8_t) entry_thread_state;
1829
1830 DUK_ASSERT((thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread == NULL) || /* first call */
1831 (thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread != NULL) || /* other call */
1832 (thr->state == DUK_HTHREAD_STATE_RUNNING && thr->heap->curr_thread == thr)); /* current thread */
1833
1834 thr->heap->call_recursion_depth = entry_call_recursion_depth;
1835
1836 /* If the debugger is active we need to force an interrupt so that
1837 * debugger breakpoints are rechecked. This is important for function
1838 * calls caused by side effects (e.g. when doing a DUK_OP_GETPROP), see
1839 * GH-303. Only needed for success path, error path always causes a
1840 * breakpoint recheck in the executor. It would be enough to set this
1841 * only when returning to an Ecmascript activation, but setting the flag
1842 * on every return should have no ill effect.
1843 */
1844#if defined(DUK_USE_DEBUGGER_SUPPORT)
1845 if (DUK_HEAP_IS_DEBUGGER_ATTACHED(thr->heap)) {
1846 DUK_DD(DUK_DDPRINT("returning with debugger enabled, force interrupt"));
1847 DUK_ASSERT(thr->interrupt_counter <= thr->interrupt_init);
1848 thr->interrupt_init -= thr->interrupt_counter;
1849 thr->interrupt_counter = 0;
1850 thr->heap->dbg_force_restart = 1;
1851 }
1852#endif
1853
1854#if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
1855 duk__interrupt_fixup(thr, entry_curr_thread);
1856#endif
1857}
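
The discipline used above -- install our own setjmp catchpoint, and on the error path restore the previous catchpoint before doing any unwinding that could itself throw -- can be illustrated with a generic, standalone C sketch (not Duktape code):

#include <setjmp.h>
#include <stddef.h>

jmp_buf *current_catchpoint = NULL;   /* innermost active catchpoint */

void raise_error(void) {
    if (current_catchpoint != NULL) {
        longjmp(*current_catchpoint, 1);
    }
    /* no catchpoint: a real engine would invoke a fatal error handler here */
}

int protected_call(void (*func)(void)) {
    jmp_buf our_jmpbuf;
    jmp_buf *old_catchpoint = current_catchpoint;

    current_catchpoint = &our_jmpbuf;
    if (setjmp(our_jmpbuf) == 0) {
        func();                               /* may raise_error() -> longjmp() */
        current_catchpoint = old_catchpoint;  /* success: plain restore */
        return 0;
    }
    /* Error path: restore the previous catchpoint *first* so that an error
     * thrown by the cleanup below propagates outwards instead of re-entering
     * this same setjmp().
     */
    current_catchpoint = old_catchpoint;
    /* ... unwind / cleanup work would go here ... */
    return 1;
}
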
1858
1859/*
1860 * duk_handle_safe_call(): make a "C protected call" within the
1861 * current activation.
1862 *
1863 * The allowed thread states for making a call are the same as for
1864 * duk_handle_call_xxx().
1865 *
1866 * Error handling is similar to duk_handle_call_xxx(); errors may be thrown
1867 * (and result in a fatal error) for insane arguments.
1868 */
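
For context, duk_safe_call() is the public API wrapper that ends up in this handler; a small usage sketch (not part of this file, the target name 'raw_divide' is made up) showing the nargs/nrets stack contract, assuming the Duktape 1.x signature without a userdata argument:

#include <stdio.h>
#include "duktape.h"

/* The target runs inside the caller's activation: it sees the caller's value
 * stack as-is and returns how many of its topmost values are results.
 */
static duk_ret_t raw_divide(duk_context *ctx) {
    double a = duk_require_number(ctx, -2);
    double b = duk_require_number(ctx, -1);
    if (b == 0.0) {
        duk_error(ctx, DUK_ERR_RANGE_ERROR, "division by zero");
    }
    duk_push_number(ctx, a / b);
    return 1;  /* one result on the stack top */
}

int main(void) {
    duk_context *ctx = duk_create_heap_default();
    duk_int_t rc;

    if (ctx == NULL) { return 1; }
    duk_push_number(ctx, 10.0);
    duk_push_number(ctx, 0.0);
    rc = duk_safe_call(ctx, raw_divide, 2 /*nargs*/, 1 /*nrets*/);
    /* The two args are replaced by exactly one value: the result on success,
     * the error value on failure (DUK_EXEC_ERROR here).
     */
    printf("rc=%ld -> %s\n", (long) rc, duk_safe_to_string(ctx, -1));
    duk_pop(ctx);
    duk_destroy_heap(ctx);
    return 0;
}
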
1869
1870/* XXX: bump preventcount by one for the duration of this call? */
1871
1872 DUK_INTERNAL duk_int_t duk_handle_safe_call(duk_hthread *thr,
1873 duk_safe_call_function func,
1874 duk_idx_t num_stack_args,
1875 duk_idx_t num_stack_rets) {
1876 duk_context *ctx = (duk_context *) thr;
1877 duk_size_t entry_valstack_bottom_index;
1878 duk_size_t entry_callstack_top;
1879 duk_size_t entry_catchstack_top;
1880 duk_int_t entry_call_recursion_depth;
1881 duk_hthread *entry_curr_thread;
1882 duk_uint_fast8_t entry_thread_state;
1883 duk_instr_t **entry_ptr_curr_pc;
1884 duk_jmpbuf *old_jmpbuf_ptr = NULL;
1885 duk_jmpbuf our_jmpbuf;
1886 duk_idx_t idx_retbase;
1887 duk_int_t retval;
1888
1889 DUK_ASSERT(thr != NULL);
1890 DUK_ASSERT(ctx != NULL);
1891
1892 /* Note: careful with indices like '-x'; if 'x' is zero, it refers to bottom */
1893 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
1894 entry_callstack_top = thr->callstack_top;
1895 entry_catchstack_top = thr->catchstack_top;
1896 entry_call_recursion_depth = thr->heap->call_recursion_depth;
1897 entry_curr_thread = thr->heap->curr_thread; /* Note: may be NULL if first call */
1898 entry_thread_state = thr->state;
1899 entry_ptr_curr_pc = thr->ptr_curr_pc; /* may be NULL */
1900 idx_retbase = duk_get_top(ctx) - num_stack_args; /* Note: not a valid stack index if num_stack_args == 0 */
1901
1902 /* Note: cannot portably debug print a function pointer, hence 'func' not printed! */
1903 DUK_DD(DUK_DDPRINT("duk_handle_safe_call: thr=%p, num_stack_args=%ld, num_stack_rets=%ld, "
1904 "valstack_top=%ld, idx_retbase=%ld, rec_depth=%ld/%ld, "
1905 "entry_valstack_bottom_index=%ld, entry_callstack_top=%ld, entry_catchstack_top=%ld, "
1906 "entry_call_recursion_depth=%ld, entry_curr_thread=%p, entry_thread_state=%ld",
1907 (void *) thr,
1908 (long) num_stack_args,
1909 (long) num_stack_rets,
1910 (long) duk_get_top(ctx),
1911 (long) idx_retbase,
1912 (long) thr->heap->call_recursion_depth,
1913 (long) thr->heap->call_recursion_limit,
1914 (long) entry_valstack_bottom_index,
1915 (long) entry_callstack_top,
1916 (long) entry_catchstack_top,
1917 (long) entry_call_recursion_depth,
1918 (void *) entry_curr_thread,
1919 (long) entry_thread_state));
1920
1921 if (idx_retbase < 0) {
1922 /* Since stack indices are not reliable, we can't do anything useful
1923 * here. Invoke the existing setjmp catcher, or if it doesn't exist,
1924 * call the fatal error handler.
1925 */
1926
1928 }
1929
1930 /* setjmp catchpoint setup */
1931
1932 old_jmpbuf_ptr = thr->heap->lj.jmpbuf_ptr;
1933 thr->heap->lj.jmpbuf_ptr = &our_jmpbuf;
1934
1935#if defined(DUK_USE_CPP_EXCEPTIONS)
1936 try {
1937#else
1938 DUK_ASSERT(thr->heap->lj.jmpbuf_ptr == &our_jmpbuf);
1939 if (DUK_SETJMP(our_jmpbuf.jb) == 0) {
1940 /* Success path. */
1941#endif
1942 DUK_DDD(DUK_DDDPRINT("safe_call setjmp catchpoint setup complete"));
1943
1944 duk__handle_safe_call_inner(thr,
1945 func,
1946 idx_retbase,
1947 num_stack_rets,
1948 entry_valstack_bottom_index,
1949 entry_callstack_top,
1950 entry_catchstack_top);
1951
1952 /* Longjmp state is kept clean in success path */
1954 DUK_ASSERT(thr->heap->lj.iserror == 0);
1957
1958 /* Note: either pointer may be NULL (at entry), so don't assert */
1959 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
1960
1961 retval = DUK_EXEC_SUCCESS;
1962#if defined(DUK_USE_CPP_EXCEPTIONS)
1963 } catch (duk_internal_exception &exc) {
1964 DUK_UNREF(exc);
1965#else
1966 } else {
1967 /* Error path. */
1968#endif
1969 duk__handle_safe_call_error(thr,
1970 idx_retbase,
1971 num_stack_rets,
1972 entry_valstack_bottom_index,
1973 entry_callstack_top,
1974 entry_catchstack_top,
1975 old_jmpbuf_ptr);
1976
1977 /* Longjmp state is cleaned up by error handling */
1979 DUK_ASSERT(thr->heap->lj.iserror == 0);
1982
1983 retval = DUK_EXEC_ERROR;
1984 }
1985#if defined(DUK_USE_CPP_EXCEPTIONS)
1986 catch (std::exception &exc) {
1987 const char *what = exc.what();
1988 if (!what) {
1989 what = "unknown";
1990 }
1991 DUK_D(DUK_DPRINT("unexpected c++ std::exception (perhaps thrown by user code)"));
1992 try {
1993 DUK_ERROR_FMT1(thr, DUK_ERR_API_ERROR, "caught invalid c++ std::exception '%s' (perhaps thrown by user code)", what);
1994 } catch (duk_internal_exception exc) {
1995 DUK_D(DUK_DPRINT("caught api error thrown from unexpected c++ std::exception"));
1996 DUK_UNREF(exc);
1997 duk__handle_safe_call_error(thr,
1998 idx_retbase,
1999 num_stack_rets,
2000 entry_valstack_bottom_index,
2001 entry_callstack_top,
2002 entry_catchstack_top,
2003 old_jmpbuf_ptr);
2004 retval = DUK_EXEC_ERROR;
2005 }
2006 } catch (...) {
2007 DUK_D(DUK_DPRINT("unexpected c++ exception (perhaps thrown by user code)"));
2008 try {
2009 DUK_ERROR_API(thr, "caught invalid c++ exception (perhaps thrown by user code)");
2010 } catch (duk_internal_exception exc) {
2011 DUK_D(DUK_DPRINT("caught api error thrown from unexpected c++ exception"));
2012 DUK_UNREF(exc);
2013 duk__handle_safe_call_error(thr,
2014 idx_retbase,
2015 num_stack_rets,
2016 entry_valstack_bottom_index,
2017 entry_callstack_top,
2018 entry_catchstack_top,
2019 old_jmpbuf_ptr);
2020 retval = DUK_EXEC_ERROR;
2021 }
2022 }
2023#endif
2024
2025 DUK_ASSERT(thr->heap->lj.jmpbuf_ptr == old_jmpbuf_ptr); /* success/error path both do this */
2026
2027 duk__handle_safe_call_shared(thr,
2028 idx_retbase,
2029 num_stack_rets,
2030 entry_call_recursion_depth,
2031 entry_curr_thread,
2032 entry_thread_state,
2033 entry_ptr_curr_pc);
2034
2035 return retval;
2036}
2037
2038 DUK_LOCAL void duk__handle_safe_call_inner(duk_hthread *thr,
2039 duk_safe_call_function func,
2040 duk_idx_t idx_retbase,
2041 duk_idx_t num_stack_rets,
2042 duk_size_t entry_valstack_bottom_index,
2043 duk_size_t entry_callstack_top,
2044 duk_size_t entry_catchstack_top) {
2045 duk_context *ctx;
2046 duk_ret_t rc;
2047
2048 DUK_ASSERT(thr != NULL);
2049 ctx = (duk_context *) thr;
2051 DUK_UNREF(entry_valstack_bottom_index);
2052 DUK_UNREF(entry_callstack_top);
2053 DUK_UNREF(entry_catchstack_top);
2054
2055 /*
2056 * Thread state check and book-keeping.
2057 */
2058
2059 if (thr == thr->heap->curr_thread) {
2060 /* same thread */
2061 if (thr->state != DUK_HTHREAD_STATE_RUNNING) {
2062 /* should actually never happen, but check anyway */
2063 goto thread_state_error;
2064 }
2065 } else {
2066 /* different thread */
2067 DUK_ASSERT(thr->heap->curr_thread == NULL ||
2068 thr->heap->curr_thread->state == DUK_HTHREAD_STATE_RUNNING);
2069 if (thr->state != DUK_HTHREAD_STATE_INACTIVE) {
2070 goto thread_state_error;
2071 }
2072 DUK_HEAP_SWITCH_THREAD(thr->heap, thr);
2073 thr->state = DUK_HTHREAD_STATE_RUNNING;
2074
2075 /* Note: multiple threads may be simultaneously in the RUNNING
2076 * state, but not in the same "resume chain".
2077 */
2078 }
2079
2080 DUK_ASSERT(thr->heap->curr_thread == thr);
2082
2083 /*
2084 * Recursion limit check.
2085 *
2086 * Note: there is no need for an "ignore recursion limit" flag
2087 * for duk_handle_safe_call now.
2088 */
2089
2092 if (thr->heap->call_recursion_depth >= thr->heap->call_recursion_limit) {
2093 /* XXX: error message is a bit misleading: we reached a recursion
2094 * limit which is also essentially the same as a C callstack limit
2095 * (except perhaps with some relaxed threading assumptions).
2096 */
2097 DUK_ERROR_RANGE(thr, DUK_STR_C_CALLSTACK_LIMIT);
2098 }
2099 thr->heap->call_recursion_depth++;
2100
2101 /*
2102 * Valstack spare check
2103 */
2104
2105 duk_require_stack(ctx, 0); /* internal spare */
2106
2107 /*
2108 * Make the C call
2109 */
2110
2111 rc = func(ctx);
2112
2113 DUK_DDD(DUK_DDDPRINT("safe_call, func rc=%ld", (long) rc));
2114
2115 /*
2116 * Valstack manipulation for results.
2117 */
2118
2119 /* we're running inside the caller's activation, so no change in call/catch stack or valstack bottom */
2120 DUK_ASSERT(thr->callstack_top == entry_callstack_top);
2121 DUK_ASSERT(thr->catchstack_top == entry_catchstack_top);
2122 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
2123 DUK_ASSERT((duk_size_t) (thr->valstack_bottom - thr->valstack) == entry_valstack_bottom_index);
2125 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
2126
2127 if (rc < 0) {
2128 duk_error_throw_from_negative_rc(thr, rc);
2129 }
2130 DUK_ASSERT(rc >= 0);
2131
2132 if (duk_get_top(ctx) < rc) {
2133 DUK_ERROR_API(thr, "not enough stack values for safe_call rc");
2134 }
2135
2136 DUK_ASSERT(thr->catchstack_top == entry_catchstack_top); /* no need to unwind */
2137 DUK_ASSERT(thr->callstack_top == entry_callstack_top);
2138
2139 duk__safe_call_adjust_valstack(thr, idx_retbase, num_stack_rets, rc);
2140 return;
2141
2142 thread_state_error:
2143 DUK_ERROR_FMT1(thr, DUK_ERR_TYPE_ERROR, "invalid thread state for safe_call (%ld)", (long) thr->state);
2145}
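
As the rc handling above shows, a safe call target reports how many result values it left on the stack top, and a negative rc is converted into a thrown error. A minimal sketch of a target using the negative-rc shorthand (hypothetical helper, assuming the standard DUK_RET_xxx codes):

#include "duktape.h"

duk_ret_t require_string_arg(duk_context *ctx) {
    if (!duk_is_string(ctx, -1)) {
        return DUK_RET_TYPE_ERROR;  /* converted into a thrown TypeError */
    }
    duk_push_string(ctx, "ok");
    return 1;                       /* one result value on the stack top */
}
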
2146
2147 DUK_LOCAL void duk__handle_safe_call_error(duk_hthread *thr,
2148 duk_idx_t idx_retbase,
2149 duk_idx_t num_stack_rets,
2150 duk_size_t entry_valstack_bottom_index,
2151 duk_size_t entry_callstack_top,
2152 duk_size_t entry_catchstack_top,
2153 duk_jmpbuf *old_jmpbuf_ptr) {
2154 duk_context *ctx;
2155
2156 DUK_ASSERT(thr != NULL);
2157 ctx = (duk_context *) thr;
2159
2160 /*
2161 * Error during call. The error value is at heap->lj.value1.
2162 *
2163 * The very first thing we do is restore the previous setjmp catcher.
2164 * This means that any error in error handling will propagate outwards
2165 * instead of causing a setjmp() re-entry above.
2166 */
2167
2168 DUK_DDD(DUK_DDDPRINT("error caught during protected duk_handle_safe_call()"));
2169
2170 /* Other longjmp types are handled by executor before propagating
2171 * the error here.
2172 */
2174 DUK_ASSERT(thr->callstack_top >= entry_callstack_top);
2175 DUK_ASSERT(thr->catchstack_top >= entry_catchstack_top);
2176
2177 /* Note: either pointer may be NULL (at entry), so don't assert. */
2178 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
2179
2180 DUK_ASSERT(thr->catchstack_top >= entry_catchstack_top);
2181 DUK_ASSERT(thr->callstack_top >= entry_callstack_top);
2182 duk_hthread_catchstack_unwind(thr, entry_catchstack_top);
2184 duk_hthread_callstack_unwind(thr, entry_callstack_top);
2186 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
2187
2188 /* [ ... | (crud) ] */
2189
2190 /* XXX: space in valstack? see discussion in duk_handle_call_xxx(). */
2191 duk_push_tval(ctx, &thr->heap->lj.value1);
2192
2193 /* [ ... | (crud) errobj ] */
2194
2195 DUK_ASSERT(duk_get_top(ctx) >= 1); /* at least errobj must be on stack */
2196
2197 /* check that the valstack has space for the final amount and any
2198 * intermediate space needed; this is unoptimal but should be safe
2199 */
2200 duk_require_stack_top(ctx, idx_retbase + num_stack_rets); /* final configuration */
2201 duk_require_stack(ctx, num_stack_rets);
2202
2203 duk__safe_call_adjust_valstack(thr, idx_retbase, num_stack_rets, 1); /* 1 = num actual 'return values' */
2204
2205 /* [ ... | ] or [ ... | errobj (M * undefined)] where M = num_stack_rets - 1 */
2206
2207 /* These are just convenience "wiping" of state. Side effects should
2208 * not be an issue here: thr->heap and thr->heap->lj have a stable
2209 * pointer. Finalizer runs etc capture even out-of-memory errors so
2210 * nothing should throw here.
2211 */
2213 thr->heap->lj.iserror = 0;
2214 DUK_TVAL_SET_UNDEFINED_UPDREF(thr, &thr->heap->lj.value1); /* side effects */
2215 DUK_TVAL_SET_UNDEFINED_UPDREF(thr, &thr->heap->lj.value2); /* side effects */
2216}
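
The resulting [ errobj, undefined ... ] layout noted above is what a duk_safe_call() caller observes when it asked for more than one result. An illustrative fragment (hypothetical 'always_fails' target, given an existing duk_context):

#include <stdio.h>
#include "duktape.h"

duk_ret_t always_fails(duk_context *ctx) {
    duk_error(ctx, DUK_ERR_ERROR, "boom");
    return 0;  /* never reached */
}

void demo(duk_context *ctx) {
    duk_int_t rc = duk_safe_call(ctx, always_fails, 0 /*nargs*/, 2 /*nrets*/);
    printf("rc=%ld ret0=%s ret1=%s\n",
           (long) rc,
           duk_safe_to_string(ctx, -2),    /* error value */
           duk_safe_to_string(ctx, -1));   /* undefined padding */
    duk_pop_2(ctx);
}
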
2217
2218 DUK_LOCAL void duk__handle_safe_call_shared(duk_hthread *thr,
2219 duk_idx_t idx_retbase,
2220 duk_idx_t num_stack_rets,
2221 duk_int_t entry_call_recursion_depth,
2222 duk_hthread *entry_curr_thread,
2223 duk_uint_fast8_t entry_thread_state,
2224 duk_instr_t **entry_ptr_curr_pc) {
2225 duk_context *ctx;
2226
2227 DUK_ASSERT(thr != NULL);
2228 ctx = (duk_context *) thr;
2230 DUK_UNREF(ctx);
2231 DUK_UNREF(idx_retbase);
2232 DUK_UNREF(num_stack_rets);
2233
2234 /* Restore entry thread executor curr_pc stack frame pointer. */
2235 thr->ptr_curr_pc = entry_ptr_curr_pc;
2236
2237 /* XXX: because we unwind stacks above, thr->heap->curr_thread is at
2238 * risk of pointing to an already freed thread. This was indeed the
2239 * case in test-bug-multithread-valgrind.c, until duk_handle_call()
2240 * was fixed to restore thr->heap->curr_thread before rethrowing an
2241 * uncaught error.
2242 */
2243 DUK_HEAP_SWITCH_THREAD(thr->heap, entry_curr_thread); /* may be NULL */
2244 thr->state = (duk_uint8_t) entry_thread_state;
2245
2246 DUK_ASSERT((thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread == NULL) || /* first call */
2247 (thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread != NULL) || /* other call */
2248 (thr->state == DUK_HTHREAD_STATE_RUNNING && thr->heap->curr_thread == thr)); /* current thread */
2249
2250 thr->heap->call_recursion_depth = entry_call_recursion_depth;
2251
2252 /* stack discipline consistency check */
2253 DUK_ASSERT(duk_get_top(ctx) == idx_retbase + num_stack_rets);
2254
2255 /* A debugger forced interrupt check is not needed here, as
2256 * problematic safe calls are not caused by side effects.
2257 */
2258
2259#if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
2260 duk__interrupt_fixup(thr, entry_curr_thread);
2261#endif
2262}
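
The save/restore of call_recursion_depth and thread state done here follows the same shape as a plain native recursion guard: remember the entry depth, bump it around the call, and restore the remembered value on every exit path. A generic standalone sketch (not Duktape code):

int call_recursion_depth = 0;
#define CALL_RECURSION_LIMIT 60

int guarded_call(int (*func)(void), int *out_result) {
    int entry_depth = call_recursion_depth;

    if (call_recursion_depth >= CALL_RECURSION_LIMIT) {
        return -1;  /* Duktape throws a RangeError at this point */
    }
    call_recursion_depth++;
    *out_result = func();
    call_recursion_depth = entry_depth;  /* restore the entry value on exit */
    return 0;
}
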
2263
2264/*
2265 * Helper for handling an Ecmascript-to-Ecmascript call or an Ecmascript
2266 * function (initial) Duktape.Thread.resume().
2267 *
2268 * Compared to normal calls handled by duk_handle_call(), there are a
2269 * bunch of differences:
2270 *
2271 * - the call is never protected
2272 * - there is no C recursion depth increase (hence an "ignore recursion
2273 * limit" flag is not applicable)
2274 * - instead of making the call, this helper just performs the thread
2275 * setup and returns; the bytecode executor then restarts execution
2276 * internally
2277 * - ecmascript functions are never 'vararg' functions (they access
2278 * varargs through the 'arguments' object)
2279 *
2280 * The callstack of the target contains an earlier Ecmascript call in case
2281 * of an Ecmascript-to-Ecmascript call (whose idx_retval is updated), or
2282 * is empty in case of an initial Duktape.Thread.resume().
2283 *
2284 * The first thing to do here is to figure out whether an ecma-to-ecma
2285 * call is actually possible. It's not always the case if the target is
2286 * a bound function; the final function may be native. In that case,
2287 * return an error so caller can fall back to a normal call path.
2288 */
2289
2290 DUK_INTERNAL duk_bool_t duk_handle_ecma_call_setup(duk_hthread *thr,
2291 duk_idx_t num_stack_args,
2292 duk_small_uint_t call_flags) {
2293 duk_context *ctx = (duk_context *) thr;
2294 duk_size_t entry_valstack_bottom_index;
2295 duk_idx_t idx_func; /* valstack index of 'func' and retval (relative to entry valstack_bottom) */
2296 duk_idx_t idx_args; /* valstack index of start of args (arg1) (relative to entry valstack_bottom) */
2297 duk_idx_t nargs; /* # argument registers target function wants (< 0 => never for ecma calls) */
2298 duk_idx_t nregs; /* # total registers target function wants on entry (< 0 => never for ecma calls) */
2299 duk_hobject *func; /* 'func' on stack (borrowed reference) */
2300 duk_tval *tv_func; /* duk_tval ptr for 'func' on stack (borrowed reference) */
2301 duk_activation *act;
2302 duk_hobject *env;
2303 duk_bool_t use_tailcall;
2304 duk_instr_t **entry_ptr_curr_pc;
2305
2306 DUK_ASSERT(thr != NULL);
2307 DUK_ASSERT(ctx != NULL);
2308 DUK_ASSERT(!((call_flags & DUK_CALL_FLAG_IS_RESUME) != 0 && (call_flags & DUK_CALL_FLAG_IS_TAILCALL) != 0));
2309
2310 /* XXX: assume these? */
2311 DUK_ASSERT(thr->valstack != NULL);
2312 DUK_ASSERT(thr->callstack != NULL);
2313 DUK_ASSERT(thr->catchstack != NULL);
2314
2315 /* no need to handle thread state book-keeping here */
2316 DUK_ASSERT((call_flags & DUK_CALL_FLAG_IS_RESUME) != 0 ||
2317 (thr->state == DUK_HTHREAD_STATE_RUNNING &&
2318 thr->heap->curr_thread == thr));
2319
2320 /* If thr->ptr_curr_pc is set, sync curr_pc to act->pc. Then NULL
2321 * thr->ptr_curr_pc so that it's not accidentally used with an incorrect
2322 * activation when side effects occur. If we end up not making the
2323 * call we must restore the value.
2324 */
2325 entry_ptr_curr_pc = thr->ptr_curr_pc;
2326 duk_hthread_sync_and_null_currpc(thr);
2327
2328 /* if a tail call:
2329 * - an Ecmascript activation must be on top of the callstack
2330 * - there cannot be any active catchstack entries
2331 */
2332#if defined(DUK_USE_ASSERTIONS)
2333 if (call_flags & DUK_CALL_FLAG_IS_TAILCALL) {
2334 duk_size_t our_callstack_index;
2335 duk_size_t i;
2336
2337 DUK_ASSERT(thr->callstack_top >= 1);
2338 our_callstack_index = thr->callstack_top - 1;
2339 DUK_ASSERT_DISABLE(our_callstack_index >= 0);
2340 DUK_ASSERT(our_callstack_index < thr->callstack_size);
2341 DUK_ASSERT(DUK_ACT_GET_FUNC(thr->callstack + our_callstack_index) != NULL);
2343
2344 /* No entry in the catchstack which would actually catch a
2345 * throw can refer to the callstack entry being reused.
2346 * There *can* be catchstack entries referring to the current
2347 * callstack entry as long as they don't catch (e.g. label sites).
2348 */
2349
2350 for (i = 0; i < thr->catchstack_top; i++) {
2351 DUK_ASSERT(thr->catchstack[i].callstack_index < our_callstack_index || /* refer to callstack entries below current */
2352 DUK_CAT_GET_TYPE(thr->catchstack + i) == DUK_CAT_TYPE_LABEL); /* or a non-catching entry */
2353 }
2354 }
2355#endif /* DUK_USE_ASSERTIONS */
2356
2357 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
2358 /* XXX: rework */
2359 idx_func = duk_normalize_index(thr, -num_stack_args - 2);
2360 idx_args = idx_func + 2;
2361
2362 DUK_DD(DUK_DDPRINT("handle_ecma_call_setup: thr=%p, "
2363 "num_stack_args=%ld, call_flags=0x%08lx (resume=%ld, tailcall=%ld), "
2364 "idx_func=%ld, idx_args=%ld, entry_valstack_bottom_index=%ld",
2365 (void *) thr,
2366 (long) num_stack_args,
2367 (unsigned long) call_flags,
2368 (long) ((call_flags & DUK_CALL_FLAG_IS_RESUME) != 0 ? 1 : 0),
2369 (long) ((call_flags & DUK_CALL_FLAG_IS_TAILCALL) != 0 ? 1 : 0),
2370 (long) idx_func,
2371 (long) idx_args,
2372 (long) entry_valstack_bottom_index));
2373
2374 if (DUK_UNLIKELY(idx_func < 0 || idx_args < 0)) {
2375 /* XXX: assert? compiler is responsible for this never happening */
2377 }
2378
2379 /*
2380 * Check the function type, handle bound function chains, and prepare
2381 * parameters for the rest of the call handling. Also figure out the
2382 * effective 'this' binding, which replaces the current value at
2383 * idx_func + 1.
2384 *
2385 * If the target function is a 'bound' one, follow the chain of 'bound'
2386 * functions until a non-bound function is found. During this process,
2387 * bound arguments are 'prepended' to existing ones, and the "this"
2388 * binding is overridden. See E5 Section 15.3.4.5.1.
2389 *
2390 * If the final target function cannot be handled by an ecma-to-ecma
2391 * call, return to the caller with a return value indicating this case.
2392 * The bound chain is resolved and the caller can resume with a plain
2393 * function call.
2394 */
2395
2396 func = duk__nonbound_func_lookup(ctx, idx_func, &num_stack_args, &tv_func, call_flags);
2397 if (func == NULL || !DUK_HOBJECT_IS_COMPILEDFUNCTION(func)) {
2398 DUK_DDD(DUK_DDDPRINT("final target is a lightfunc/nativefunc, cannot do ecma-to-ecma call"));
2399 thr->ptr_curr_pc = entry_ptr_curr_pc;
2400 return 0;
2401 }
2402 /* XXX: tv_func is not actually needed */
2403
2404 DUK_ASSERT(func != NULL);
2407
2408 duk__coerce_effective_this_binding(thr, func, idx_func + 1);
2409 DUK_DDD(DUK_DDDPRINT("effective 'this' binding is: %!T",
2410 duk_get_tval(ctx, idx_func + 1)));
2411
2412 nargs = ((duk_hcompiledfunction *) func)->nargs;
2413 nregs = ((duk_hcompiledfunction *) func)->nregs;
2414 DUK_ASSERT(nregs >= nargs);
2415
2416 /* [ ... func this arg1 ... argN ] */
2417
2418 /*
2419 * Preliminary activation record and valstack manipulation.
2420 * The concrete actions depend on whether we're dealing
2421 * with a tail call (reuse an existing activation), a resume,
2422 * or a normal call.
2423 *
2424 * The basic actions, in varying order, are:
2425 *
2426 * - Check stack size for call handling
2427 * - Grow call stack if necessary (non-tail-calls)
2428 * - Update current activation (idx_retval) if necessary
2429 * (non-tail, non-resume calls)
2430 * - Move start of args (idx_args) to valstack bottom
2431 * (tail calls)
2432 *
2433 * Don't touch valstack_bottom or valstack_top yet so that Duktape API
2434 * calls work normally.
2435 */
2436
2437 /* XXX: some overlapping code; cleanup */
2438 use_tailcall = call_flags & DUK_CALL_FLAG_IS_TAILCALL;
2439#if !defined(DUK_USE_TAILCALL)
2440 DUK_ASSERT(use_tailcall == 0); /* compiler ensures this */
2441#endif
2442 if (use_tailcall) {
2443 /* tailcall cannot be flagged to resume calls, and a
2444 * previous frame must exist
2445 */
2446 DUK_ASSERT(thr->callstack_top >= 1);
2447 DUK_ASSERT((call_flags & DUK_CALL_FLAG_IS_RESUME) == 0);
2448
2449 act = thr->callstack + thr->callstack_top - 1;
2450 if (act->flags & DUK_ACT_FLAG_PREVENT_YIELD) {
2451 /* See: test-bug-tailcall-preventyield-assert.c. */
2452 DUK_DDD(DUK_DDDPRINT("tail call prevented by current activation having DUK_ACT_FLAG_PREVENTYIELD"));
2453 use_tailcall = 0;
2454 } else if (DUK_HOBJECT_HAS_NOTAIL(func)) {
2455 DUK_D(DUK_DPRINT("tail call prevented by function having a notail flag"));
2456 use_tailcall = 0;
2457 }
2458 }
2459
2460 if (use_tailcall) {
2461 duk_tval *tv1, *tv2;
2462 duk_size_t cs_index;
2463 duk_int_t i_stk; /* must be signed for loop structure */
2464 duk_idx_t i_arg;
2465
2466 /*
2467 * Tailcall handling
2468 *
2469 * Although the callstack entry is reused, we need to explicitly unwind
2470 * the current activation (or simulate an unwind). In particular, the
2471 * current activation must be closed, otherwise something like
2472 * test-bug-reduce-judofyr.js results. Also the catchstack needs to be unwound
2473 * because there may be non-error-catching label entries in valid tail calls.
2474 */
2475
2476 DUK_DDD(DUK_DDDPRINT("is tail call, reusing activation at callstack top, at index %ld",
2477 (long) (thr->callstack_top - 1)));
2478
2479 /* 'act' already set above */
2480
2485
2486 /* Unwind catchstack entries referring to the callstack entry we're reusing */
2487 cs_index = thr->callstack_top - 1;
2488 DUK_ASSERT(thr->catchstack_top <= DUK_INT_MAX); /* catchstack limits */
2489 for (i_stk = (duk_int_t) (thr->catchstack_top - 1); i_stk >= 0; i_stk--) {
2490 duk_catcher *cat = thr->catchstack + i_stk;
2491 if (cat->callstack_index != cs_index) {
2492 /* 'i' is the first entry we'll keep */
2493 break;
2494 }
2495 }
2496 duk_hthread_catchstack_unwind(thr, i_stk + 1);
2497
2498 /* Unwind the topmost callstack entry before reusing it */
2499 DUK_ASSERT(thr->callstack_top > 0);
2500 duk_hthread_callstack_unwind(thr, thr->callstack_top - 1);
2501
2502 /* Then reuse the unwound activation; callstack was not shrunk so there is always space */
2503 thr->callstack_top++;
2505 act = thr->callstack + thr->callstack_top - 1;
2506
2507 /* Start filling in the activation */
2508 act->func = func; /* don't want an intermediate exposed state with func == NULL */
2509#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
2510 act->prev_caller = NULL;
2511#endif
2512 DUK_ASSERT(func != NULL);
2514 act->curr_pc = DUK_HCOMPILEDFUNCTION_GET_CODE_BASE(thr->heap, (duk_hcompiledfunction *) func); /* don't want an intermediate exposed state with invalid pc */
2516#if defined(DUK_USE_DEBUGGER_SUPPORT)
2517 act->prev_line = 0;
2518#endif
2519 DUK_TVAL_SET_OBJECT(&act->tv_func, func); /* borrowed, no refcount */
2520#if defined(DUK_USE_REFERENCE_COUNTING)
2521 DUK_HOBJECT_INCREF(thr, func);
2522 act = thr->callstack + thr->callstack_top - 1; /* side effects (currently none though) */
2523#endif
2524
2525#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
2526#if defined(DUK_USE_TAILCALL)
2527#error incorrect options: tail calls enabled with function caller property
2528#endif
2529 /* XXX: this doesn't actually work properly for tail calls, so
2530 * tail calls are disabled when DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
2531 * is in use.
2532 */
2533 duk__update_func_caller_prop(thr, func);
2534 act = thr->callstack + thr->callstack_top - 1;
2535#endif
2536
2537 act->flags = (DUK_HOBJECT_HAS_STRICT(func) ?
2538 DUK_ACT_FLAG_STRICT | DUK_ACT_FLAG_TAILCALLED :
2539 DUK_ACT_FLAG_TAILCALLED);
2540
2541 DUK_ASSERT(DUK_ACT_GET_FUNC(act) == func); /* already updated */
2542 DUK_ASSERT(act->var_env == NULL); /* already NULLed (by unwind) */
2543 DUK_ASSERT(act->lex_env == NULL); /* already NULLed (by unwind) */
2544 act->idx_bottom = entry_valstack_bottom_index; /* tail call -> reuse current "frame" */
2545 DUK_ASSERT(nregs >= 0);
2546#if 0 /* topmost activation idx_retval is considered garbage, no need to init */
2547 act->idx_retval = 0;
2548#endif
2549
2550 /*
2551 * Manipulate valstack so that args are on the current bottom and the
2552 * previous caller's 'this' binding (which is the value preceding the
2553 * current bottom) is replaced with the new 'this' binding:
2554 *
2555 * [ ... this_old | (crud) func this_new arg1 ... argN ]
2556 * --> [ ... this_new | arg1 ... argN ]
2557 *
2558 * For tail calling to work properly, the valstack bottom must not grow
2559 * here; otherwise crud would accumulate on the valstack.
2560 */
2561
2562 tv1 = thr->valstack_bottom - 1;
2563 tv2 = thr->valstack_bottom + idx_func + 1;
2564 DUK_ASSERT(tv1 >= thr->valstack && tv1 < thr->valstack_top); /* tv1 is -below- valstack_bottom */
2565 DUK_ASSERT(tv2 >= thr->valstack_bottom && tv2 < thr->valstack_top);
2566 DUK_TVAL_SET_TVAL_UPDREF(thr, tv1, tv2); /* side effects */
2567
2568 for (i_arg = 0; i_arg < idx_args; i_arg++) {
2569 /* XXX: block removal API primitive */
2570 /* Note: 'func' is popped from valstack here, but it is
2571 * already reachable from the activation.
2572 */
2573 duk_remove(ctx, 0);
2574 }
2575 idx_func = 0; DUK_UNREF(idx_func); /* really 'not applicable' anymore, should not be referenced after this */
2576 idx_args = 0;
2577
2578 /* [ ... this_new | arg1 ... argN ] */
2579 } else {
2580 DUK_DDD(DUK_DDDPRINT("not a tail call, pushing a new activation to callstack, to index %ld",
2581 (long) (thr->callstack_top)));
2582
2583 duk_hthread_callstack_grow(thr);
2584
2585 if (call_flags & DUK_CALL_FLAG_IS_RESUME) {
2586 DUK_DDD(DUK_DDDPRINT("is resume -> no update to current activation (may not even exist)"));
2587 } else {
2588 DUK_DDD(DUK_DDDPRINT("update to current activation idx_retval"));
2590 DUK_ASSERT(thr->callstack_top >= 1);
2591 act = thr->callstack + thr->callstack_top - 1;
2594 act->idx_retval = entry_valstack_bottom_index + idx_func;
2595 }
2596
2598 act = thr->callstack + thr->callstack_top;
2599 thr->callstack_top++;
2601
2605
2606 act->flags = (DUK_HOBJECT_HAS_STRICT(func) ?
2607 DUK_ACT_FLAG_STRICT :
2608 0);
2609 act->func = func;
2610 act->var_env = NULL;
2611 act->lex_env = NULL;
2612#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
2613 act->prev_caller = NULL;
2614#endif
2615 DUK_ASSERT(func != NULL);
2616 DUK_ASSERT(DUK_HOBJECT_HAS_COMPILEDFUNCTION(func));
2617 act->curr_pc = DUK_HCOMPILEDFUNCTION_GET_CODE_BASE(thr->heap, (duk_hcompiledfunction *) func);
2618#if defined(DUK_USE_DEBUGGER_SUPPORT)
2619 act->prev_line = 0;
2620#endif
2621 act->idx_bottom = entry_valstack_bottom_index + idx_args;
2622 DUK_ASSERT(nregs >= 0);
2623#if 0 /* topmost activation idx_retval is considered garbage, no need to init */
2624 act->idx_retval = 0;
2625#endif
2626 DUK_TVAL_SET_OBJECT(&act->tv_func, func); /* borrowed, no refcount */
2627
2628 DUK_HOBJECT_INCREF(thr, func); /* act->func */
2629
2630#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
2631 duk__update_func_caller_prop(thr, func);
2632 act = thr->callstack + thr->callstack_top - 1;
2633#endif
2634 }
2635
2636 /* [ ... func this arg1 ... argN ] (not tail call)
2637 * [ this | arg1 ... argN ] (tail call)
2638 *
2639 * idx_args updated to match
2640 */
2641
2642 /*
2643 * Environment record creation and 'arguments' object creation.
2644 * Named function expression name binding is handled by the
2645 * compiler; the compiled function's parent env will contain
2646 * the (immutable) binding already.
2647 *
2648 * Delayed creation (on demand) is handled in duk_js_var.c.
2649 */
2650
2651 /* XXX: unify handling with native call. */
2652
2653 DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(func)); /* bound function chain has already been resolved */
2654
2655 if (!DUK_HOBJECT_HAS_NEWENV(func)) {
2656 /* use existing env (e.g. for non-strict eval); cannot have
2657 * an own 'arguments' object (but can refer to the existing one)
2658 */
2659
2660 duk__handle_oldenv_for_call(thr, func, act);
2661
2662 DUK_ASSERT(act->lex_env != NULL);
2663 DUK_ASSERT(act->var_env != NULL);
2664 goto env_done;
2665 }
2666
2668
2669 if (!DUK_HOBJECT_HAS_CREATEARGS(func)) {
2670 /* no need to create environment record now; leave as NULL */
2671 DUK_ASSERT(act->lex_env == NULL);
2672 DUK_ASSERT(act->var_env == NULL);
2673 goto env_done;
2674 }
2675
2676 /* third arg: absolute index (to entire valstack) of idx_bottom of new activation */
2677 env = duk_create_activation_environment_record(thr, func, act->idx_bottom);
2678 DUK_ASSERT(env != NULL);
2679
2680 /* [ ... arg1 ... argN envobj ] */
2681
2682 /* original input stack before nargs/nregs handling must be
2683 * intact for 'arguments' object
2684 */
2686 duk__handle_createargs_for_call(thr, func, env, num_stack_args);
2687
2688 /* [ ... arg1 ... argN envobj ] */
2689
2690 act = thr->callstack + thr->callstack_top - 1;
2691 act->lex_env = env;
2692 act->var_env = env;
2693 DUK_HOBJECT_INCREF(thr, act->lex_env);
2694 DUK_HOBJECT_INCREF(thr, act->var_env);
2695 duk_pop(ctx);
2696
2697 env_done:
2698 /* [ ... arg1 ... argN ] */
2699
2700 /*
2701 * Setup value stack: clamp to 'nargs', fill up to 'nregs'
2702 */
2703
2704 duk__adjust_valstack_and_top(thr,
2705 num_stack_args,
2706 idx_args,
2707 nregs,
2708 nargs,
2709 func);
2710
2711 /*
2712 * Shift to new valstack_bottom.
2713 */
2714
2715 thr->valstack_bottom = thr->valstack_bottom + idx_args;
2716 /* keep current valstack_top */
2717 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
2719 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
2720
2721 /*
2722 * Return to bytecode executor, which will resume execution from
2723 * the topmost activation.
2724 */
2725
2726 return 1;
2727}
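
As the lookup above shows, a bound chain whose final target is not a compiled Ecmascript function makes this helper return 0, and the caller falls back to the normal call path; from the embedder's side the call simply works either way. A small sketch using the public API (Math.max is a native built-in, so the fallback path is taken):

#include <stdio.h>
#include "duktape.h"

int main(void) {
    duk_context *ctx = duk_create_heap_default();
    if (ctx == NULL) { return 1; }
    /* The bound chain resolves to a native function, so an ecma-to-ecma call
     * is not possible; the engine uses the normal call path instead and the
     * script result is the same either way.
     */
    if (duk_peval_string(ctx, "var f = Math.max.bind(null, 1, 2); f(3);") == 0) {
        printf("f(3) -> %s\n", duk_safe_to_string(ctx, -1));  /* 3 */
    }
    duk_pop(ctx);
    duk_destroy_heap(ctx);
    return 0;
}
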