duktape-1.8.0/src-separate/duk_js_call.c
1/*
2 * Call handling.
3 *
4 * Main functions are:
5 *
6 * - duk_handle_call_unprotected(): unprotected call to Ecmascript or
7 * Duktape/C function
8 * - duk_handle_call_protected(): protected call to Ecmascript or
9 * Duktape/C function
10 * - duk_handle_safe_call(): make a protected C call within current
11 * activation
12 * - duk_handle_ecma_call_setup(): Ecmascript-to-Ecmascript calls
13 * (not always possible), including tail calls and coroutine resume
14 *
15 * See 'execution.rst'.
16 *
17 * Note: setjmp() and local variables have a nasty interaction,
18 * see execution.rst; non-volatile locals modified after setjmp()
19 * call are not guaranteed to keep their value.
20 */
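/* Example sketch (kept compiled out): how an embedding typically reaches the
 * handlers listed above through the public API. duk_call()/duk_pcall() end up
 * in the unprotected/protected call handlers, and duk_safe_call() in
 * duk_handle_safe_call(). Only the public Duktape 1.x API from duktape.h is
 * assumed here; the helper names are illustrative.
 */
#if 0
#include <stdio.h>
#include "duktape.h"

static duk_ret_t my_adder(duk_context *ctx) {
	duk_push_int(ctx, duk_get_int(ctx, 0) + duk_get_int(ctx, 1));
	return 1;  /* one return value */
}

static void example_protected_call(duk_context *ctx) {
	duk_push_c_function(ctx, my_adder, 2 /*nargs*/);  /* func */
	duk_push_int(ctx, 2);                             /* arg1 */
	duk_push_int(ctx, 3);                             /* arg2 */

	/* [ ... func arg1 arg2 ] -> [ ... retval ] or [ ... errobj ] */
	if (duk_pcall(ctx, 2 /*num_stack_args*/) == DUK_EXEC_SUCCESS) {
		printf("result: %ld\n", (long) duk_get_int(ctx, -1));  /* 5 */
	} else {
		printf("error: %s\n", duk_safe_to_string(ctx, -1));
	}
	duk_pop(ctx);
}
#endif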
21
22#include "duk_internal.h"
23
24/*
25 * Forward declarations.
26 */
27
28DUK_LOCAL_DECL void duk__handle_call_inner(duk_hthread *thr,
29 duk_idx_t num_stack_args,
30 duk_small_uint_t call_flags,
31 duk_idx_t idx_func);
32DUK_LOCAL_DECL void duk__handle_call_error(duk_hthread *thr,
33 duk_size_t entry_valstack_bottom_index,
34 duk_size_t entry_valstack_end,
35 duk_size_t entry_catchstack_top,
36 duk_size_t entry_callstack_top,
37 duk_int_t entry_call_recursion_depth,
38 duk_hthread *entry_curr_thread,
39 duk_uint_fast8_t entry_thread_state,
40 duk_instr_t **entry_ptr_curr_pc,
41 duk_idx_t idx_func,
42 duk_jmpbuf *old_jmpbuf_ptr);
43DUK_LOCAL_DECL void duk__handle_safe_call_inner(duk_hthread *thr,
44 duk_safe_call_function func,
45 duk_idx_t idx_retbase,
46 duk_idx_t num_stack_rets,
47 duk_size_t entry_valstack_bottom_index,
48 duk_size_t entry_callstack_top,
49 duk_size_t entry_catchstack_top);
50DUK_LOCAL_DECL void duk__handle_safe_call_error(duk_hthread *thr,
51 duk_idx_t idx_retbase,
52 duk_idx_t num_stack_rets,
53 duk_size_t entry_valstack_bottom_index,
54 duk_size_t entry_callstack_top,
55 duk_size_t entry_catchstack_top,
56 duk_jmpbuf *old_jmpbuf_ptr);
57DUK_LOCAL_DECL void duk__handle_safe_call_shared(duk_hthread *thr,
58 duk_idx_t idx_retbase,
59 duk_idx_t num_stack_rets,
60 duk_int_t entry_call_recursion_depth,
61 duk_hthread *entry_curr_thread,
62 duk_uint_fast8_t entry_thread_state,
63 duk_instr_t **entry_ptr_curr_pc);
64
65/*
66 * Interrupt counter fixup (for development only).
67 */
68
69#if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
70DUK_LOCAL void duk__interrupt_fixup(duk_hthread *thr, duk_hthread *entry_curr_thread) {
71 /* Currently the bytecode executor and executor interrupt
72 * instruction counts are off because we don't execute the
73 * interrupt handler when we're about to exit from the initial
74 * user call into Duktape.
75 *
76 * If we were to execute the interrupt handler here, the counts
77 * would match. You can enable this block manually to check
78 * that this is the case.
79 */
80
81 DUK_ASSERT(thr != NULL);
82 DUK_ASSERT(thr->heap != NULL);
83
84#if defined(DUK_USE_INTERRUPT_DEBUG_FIXUP)
85 if (entry_curr_thread == NULL) {
86 thr->interrupt_init = thr->interrupt_init - thr->interrupt_counter;
87 thr->heap->inst_count_interrupt += thr->interrupt_init;
88 DUK_DD(DUK_DDPRINT("debug test: updated interrupt count on exit to "
89 "user code, instruction counts: executor=%ld, interrupt=%ld",
90 (long) thr->heap->inst_count_exec, (long) thr->heap->inst_count_interrupt));
91 DUK_ASSERT(thr->heap->inst_count_exec == thr->heap->inst_count_interrupt);
92 }
93#else
94 DUK_UNREF(thr);
95 DUK_UNREF(entry_curr_thread);
96#endif
97}
98#endif
99
100/*
101 * Arguments object creation.
102 *
103 * Creating arguments objects involves many small details, see E5 Section
104 * 10.6 for the specific requirements. Much of the arguments object exotic
105 * behavior is implemented in duk_hobject_props.c, and is enabled by the
106 * object flag DUK_HOBJECT_FLAG_EXOTIC_ARGUMENTS.
107 */
108
109DUK_LOCAL void duk__create_arguments_object(duk_hthread *thr,
110 duk_hobject *func,
111 duk_hobject *varenv,
112 duk_idx_t idx_argbase, /* idx of first argument on stack */
113 duk_idx_t num_stack_args) { /* num args starting from idx_argbase */
114 duk_context *ctx = (duk_context *) thr;
115 duk_hobject *arg; /* 'arguments' */
116 duk_hobject *formals; /* formals for 'func' (may be NULL if func is a C function) */
117 duk_idx_t i_arg;
118 duk_idx_t i_map;
119 duk_idx_t i_mappednames;
120 duk_idx_t i_formals;
121 duk_idx_t i_argbase;
122 duk_idx_t n_formals;
123 duk_idx_t idx;
124 duk_bool_t need_map;
125
126 DUK_DDD(DUK_DDDPRINT("creating arguments object for func=%!iO, varenv=%!iO, "
127 "idx_argbase=%ld, num_stack_args=%ld",
128 (duk_heaphdr *) func, (duk_heaphdr *) varenv,
129 (long) idx_argbase, (long) num_stack_args));
130
131 DUK_ASSERT(thr != NULL);
132 DUK_ASSERT(func != NULL);
134 DUK_ASSERT(varenv != NULL);
135 DUK_ASSERT(idx_argbase >= 0); /* assumed to be bottom relative */
136 DUK_ASSERT(num_stack_args >= 0);
137
138 need_map = 0;
139
140 i_argbase = idx_argbase;
141 DUK_ASSERT(i_argbase >= 0);
142
143 duk_push_hobject(ctx, func);
145 formals = duk_get_hobject(ctx, -1);
146 n_formals = 0;
147 if (formals) {
149 n_formals = (duk_idx_t) duk_require_int(ctx, -1);
150 duk_pop(ctx);
151 }
152 duk_remove(ctx, -2); /* leave formals on stack for later use */
153 i_formals = duk_require_top_index(ctx);
154
155 DUK_ASSERT(n_formals >= 0);
156 DUK_ASSERT(formals != NULL || n_formals == 0);
157
158 DUK_DDD(DUK_DDDPRINT("func=%!O, formals=%!O, n_formals=%ld",
159 (duk_heaphdr *) func, (duk_heaphdr *) formals,
160 (long) n_formals));
161
162 /* [ ... formals ] */
163
164 /*
165 * Create required objects:
166 * - 'arguments' object: array-like, but not an array
167 * - 'map' object: internal object, tied to 'arguments'
168 * - 'mappedNames' object: temporary value used during construction
169 */
170
171 i_arg = duk_push_object_helper(ctx,
176 DUK_ASSERT(i_arg >= 0);
177 arg = duk_require_hobject(ctx, -1);
178 DUK_ASSERT(arg != NULL);
179
180 i_map = duk_push_object_helper(ctx,
183 -1); /* no prototype */
184 DUK_ASSERT(i_map >= 0);
185
186 i_mappednames = duk_push_object_helper(ctx,
189 -1); /* no prototype */
190 DUK_ASSERT(i_mappednames >= 0);
191
192 /* [ ... formals arguments map mappedNames ] */
193
194 DUK_DDD(DUK_DDDPRINT("created arguments related objects: "
195 "arguments at index %ld -> %!O "
196 "map at index %ld -> %!O "
197 "mappednames at index %ld -> %!O",
198 (long) i_arg, (duk_heaphdr *) duk_get_hobject(ctx, i_arg),
199 (long) i_map, (duk_heaphdr *) duk_get_hobject(ctx, i_map),
200 (long) i_mappednames, (duk_heaphdr *) duk_get_hobject(ctx, i_mappednames)));
201
202 /*
203 * Init arguments properties, map, etc.
204 */
205
206 duk_push_int(ctx, num_stack_args);
208
209 /*
210 * Init argument related properties
211 */
212
213 /* step 11 */
214 idx = num_stack_args - 1;
215 while (idx >= 0) {
216 DUK_DDD(DUK_DDDPRINT("arg idx %ld, argbase=%ld, argidx=%ld",
217 (long) idx, (long) i_argbase, (long) (i_argbase + idx)));
218
219 DUK_DDD(DUK_DDDPRINT("define arguments[%ld]=arg", (long) idx));
220 duk_dup(ctx, i_argbase + idx);
221 duk_xdef_prop_index_wec(ctx, i_arg, (duk_uarridx_t) idx);
222 DUK_DDD(DUK_DDDPRINT("defined arguments[%ld]=arg", (long) idx));
223
224 /* step 11.c is relevant only if non-strict (checked in 11.c.ii) */
225 if (!DUK_HOBJECT_HAS_STRICT(func) && idx < n_formals) {
226 DUK_ASSERT(formals != NULL);
227
228 DUK_DDD(DUK_DDDPRINT("non-strict function, index within formals (%ld < %ld)",
229 (long) idx, (long) n_formals));
230
231 duk_get_prop_index(ctx, i_formals, idx);
232 DUK_ASSERT(duk_is_string(ctx, -1));
233
234 duk_dup(ctx, -1); /* [ ... name name ] */
235
236 if (!duk_has_prop(ctx, i_mappednames)) {
237 /* steps 11.c.ii.1 - 11.c.ii.4, but our internal book-keeping
238 * differs from the reference model
239 */
240
241 /* [ ... name ] */
242
243 need_map = 1;
244
245 DUK_DDD(DUK_DDDPRINT("set mappednames[%s]=%ld",
246 (const char *) duk_get_string(ctx, -1),
247 (long) idx));
248 duk_dup(ctx, -1); /* name */
249 duk_push_uint(ctx, (duk_uint_t) idx); /* index */
250 duk_to_string(ctx, -1);
251 duk_xdef_prop_wec(ctx, i_mappednames); /* out of spec, must be configurable */
252
253 DUK_DDD(DUK_DDDPRINT("set map[%ld]=%s",
254 (long) idx,
255 duk_get_string(ctx, -1)));
256 duk_dup(ctx, -1); /* name */
257 duk_xdef_prop_index_wec(ctx, i_map, (duk_uarridx_t) idx); /* out of spec, must be configurable */
258 } else {
259 /* duk_has_prop() popped the second 'name' */
260 }
261
262 /* [ ... name ] */
263 duk_pop(ctx); /* pop 'name' */
264 }
265
266 idx--;
267 }
268
269 DUK_DDD(DUK_DDDPRINT("actual arguments processed"));
270
271 /* step 12 */
272 if (need_map) {
273 DUK_DDD(DUK_DDDPRINT("adding 'map' and 'varenv' to arguments object"));
274
275 /* should never happen for a strict callee */
277
278 duk_dup(ctx, i_map);
279 duk_xdef_prop_stridx(ctx, i_arg, DUK_STRIDX_INT_MAP, DUK_PROPDESC_FLAGS_NONE); /* out of spec, don't care */
280
281 /* The variable environment for magic variable bindings needs to be
282 * given by the caller and recorded in the arguments object.
283 *
284 * See E5 Section 10.6, the creation of setters/getters.
285 *
286 * The variable environment also provides access to the callee, so
287 * an explicit (internal) callee property is not needed.
288 */
289
290 duk_push_hobject(ctx, varenv);
291 duk_xdef_prop_stridx(ctx, i_arg, DUK_STRIDX_INT_VARENV, DUK_PROPDESC_FLAGS_NONE); /* out of spec, don't care */
292 }
293
294 /* steps 13-14 */
295 if (DUK_HOBJECT_HAS_STRICT(func)) {
296 /* Callee/caller are throwers and are not deletable etc. They
297 * could be implemented as virtual properties, but currently
298 * there is no support for virtual properties which are accessors
299 * (only plain virtual properties). This would not be difficult
300 * to change in duk_hobject_props, but we can make the throwers
301 * normal, concrete properties just as easily.
302 *
303 * Note that the specification requires that the *same* thrower
304 * built-in object is used here! See E5 Section 10.6 main
305 * algorithm, step 14, and Section 13.2.3 which describes the
306 * thrower. See test case test-arguments-throwers.js.
307 */
308
309 DUK_DDD(DUK_DDDPRINT("strict function, setting caller/callee to throwers"));
310
313 } else {
314 DUK_DDD(DUK_DDDPRINT("non-strict function, setting callee to actual value"));
315 duk_push_hobject(ctx, func);
317 }
318
319 /* set exotic behavior only after we're done */
320 if (need_map) {
321 /* Exotic behaviors are only enabled for arguments objects
322 * which have a parameter map (see E5 Section 10.6 main
323 * algorithm, step 12).
324 *
325 * In particular, a non-strict arguments object with no
326 * mapped formals does *NOT* get exotic behavior, even
327 * for e.g. the "caller" property. This seems counterintuitive
328 * but follows from E5 Section 10.6 (main algorithm, step 12).
329 */
330
331 /* cannot be strict (never mapped variables) */
333
334 DUK_DDD(DUK_DDDPRINT("enabling exotic behavior for arguments object"));
336 } else {
337 DUK_DDD(DUK_DDDPRINT("not enabling exotic behavior for arguments object"));
338 }
339
340 DUK_DDD(DUK_DDDPRINT("final arguments related objects: "
341 "arguments at index %ld -> %!O "
342 "map at index %ld -> %!O "
343 "mappednames at index %ld -> %!O",
344 (long) i_arg, (duk_heaphdr *) duk_get_hobject(ctx, i_arg),
345 (long) i_map, (duk_heaphdr *) duk_get_hobject(ctx, i_map),
346 (long) i_mappednames, (duk_heaphdr *) duk_get_hobject(ctx, i_mappednames)));
347
348 /* [ args(n) [crud] formals arguments map mappednames ] */
349
350 duk_pop_2(ctx);
351 duk_remove(ctx, -2);
352
353 /* [ args [crud] arguments ] */
354}
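/* Example sketch (kept compiled out): the mapped 'arguments' behavior that the
 * object built above enables, observed through the public API. In a non-strict
 * function arguments[i] aliases the corresponding formal; in a strict function
 * there is no parameter map. Assumes a duk_context provided by the embedding.
 */
#if 0
static void example_arguments_mapping(duk_context *ctx) {
	/* Non-strict: writing to the formal is visible through arguments[0]. */
	duk_eval_string(ctx, "(function (x) { x = 123; return arguments[0]; })(1)");
	/* -> 123 */
	duk_pop(ctx);

	/* Strict: no parameter map, arguments[0] keeps the original value. */
	duk_eval_string(ctx, "(function (x) { 'use strict'; x = 123; return arguments[0]; })(1)");
	/* -> 1 */
	duk_pop(ctx);
}
#endif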
355
356/* Helper for creating the arguments object and adding it to the env record
357 * on top of the value stack. This helper has a very strict dependency on
358 * the shape of the input stack.
359 */
360DUK_LOCAL void duk__handle_createargs_for_call(duk_hthread *thr,
361 duk_hobject *func,
362 duk_hobject *env,
363 duk_idx_t num_stack_args) {
364 duk_context *ctx = (duk_context *) thr;
365
366 DUK_DDD(DUK_DDDPRINT("creating arguments object for function call"));
367
368 DUK_ASSERT(thr != NULL);
369 DUK_ASSERT(func != NULL);
370 DUK_ASSERT(env != NULL);
372 DUK_ASSERT(duk_get_top(ctx) >= num_stack_args + 1);
373
374 /* [ ... arg1 ... argN envobj ] */
375
376 duk__create_arguments_object(thr,
377 func,
378 env,
379 duk_get_top(ctx) - num_stack_args - 1, /* idx_argbase */
380 num_stack_args);
381
382 /* [ ... arg1 ... argN envobj argobj ] */
383
385 -2,
387 DUK_HOBJECT_HAS_STRICT(func) ? DUK_PROPDESC_FLAGS_E : /* strict: non-deletable, non-writable */
388 DUK_PROPDESC_FLAGS_WE); /* non-strict: non-deletable, writable */
389 /* [ ... arg1 ... argN envobj ] */
390}
391
392/*
393 * Helper for handling a "bound function" chain when a call is being made.
394 *
395 * Follows the bound function chain until a non-bound function is found.
396 * Prepends the bound arguments to the value stack (at idx_func + 2),
397 * updating 'num_stack_args' in the process. The 'this' binding is also
398 * updated if necessary (at idx_func + 1). Note that for constructor calls
399 * the 'this' binding is never updated by [[BoundThis]].
400 *
401 * XXX: bound function chains could be collapsed at bound function creation
402 * time so that each bound function would point directly to a non-bound
403 * function. This would make call time handling much easier.
404 */
405
406DUK_LOCAL void duk__handle_bound_chain_for_call(duk_hthread *thr,
407 duk_idx_t idx_func,
408 duk_idx_t *p_num_stack_args, /* may be changed by call */
409 duk_bool_t is_constructor_call) {
410 duk_context *ctx = (duk_context *) thr;
411 duk_idx_t num_stack_args;
412 duk_tval *tv_func;
413 duk_hobject *func;
414 duk_uint_t sanity;
415
416 DUK_ASSERT(thr != NULL);
417 DUK_ASSERT(p_num_stack_args != NULL);
418
419 /* On entry, item at idx_func is a bound, non-lightweight function,
420 * but we don't rely on that below.
421 */
422
423 num_stack_args = *p_num_stack_args;
424
426 do {
427 duk_idx_t i, len;
428
429 tv_func = duk_require_tval(ctx, idx_func);
430 DUK_ASSERT(tv_func != NULL);
431
432 if (DUK_TVAL_IS_LIGHTFUNC(tv_func)) {
433 /* Lightweight function: never bound, so terminate. */
434 break;
435 } else if (DUK_TVAL_IS_OBJECT(tv_func)) {
436 func = DUK_TVAL_GET_OBJECT(tv_func);
437 if (!DUK_HOBJECT_HAS_BOUND(func)) {
438 /* Normal non-bound function. */
439 break;
440 }
441 } else {
442 /* Function.prototype.bind() should never let this happen,
443 * so an ugly error message is enough.
444 */
446 }
448
449 /* XXX: this could be more compact by accessing the internal properties
450 * directly as own properties (they cannot be inherited, and are not
451 * externally visible).
452 */
453
454 DUK_DDD(DUK_DDDPRINT("bound function encountered, ptr=%p, num_stack_args=%ld: %!T",
455 (void *) DUK_TVAL_GET_OBJECT(tv_func), (long) num_stack_args, tv_func));
456
457 /* [ ... func this arg1 ... argN ] */
458
459 if (is_constructor_call) {
460 /* See: tests/ecmascript/test-spec-bound-constructor.js */
461 DUK_DDD(DUK_DDDPRINT("constructor call: don't update this binding"));
462 } else {
464 duk_replace(ctx, idx_func + 1); /* idx_this = idx_func + 1 */
465 }
466
467 /* [ ... func this arg1 ... argN ] */
468
469 /* XXX: duk_get_length? */
470 duk_get_prop_stridx(ctx, idx_func, DUK_STRIDX_INT_ARGS); /* -> [ ... func this arg1 ... argN _Args ] */
471 duk_get_prop_stridx(ctx, -1, DUK_STRIDX_LENGTH); /* -> [ ... func this arg1 ... argN _Args length ] */
472 len = (duk_idx_t) duk_require_int(ctx, -1);
473 duk_pop(ctx);
474
475 duk_require_stack(ctx, len);
476 for (i = 0; i < len; i++) {
477 /* XXX: very slow - better to bulk allocate a gap, and copy
478 * from args_array directly (we know it has a compact array
479 * part, etc).
480 */
481
482 /* [ ... func this <some bound args> arg1 ... argN _Args ] */
483 duk_get_prop_index(ctx, -1, i);
484 duk_insert(ctx, idx_func + 2 + i); /* idx_args = idx_func + 2 */
485 }
486 num_stack_args += len; /* must be updated to work properly (e.g. creation of 'arguments') */
487 duk_pop(ctx);
488
489 /* [ ... func this <bound args> arg1 ... argN ] */
490
492 duk_replace(ctx, idx_func); /* replace in stack */
493
494 DUK_DDD(DUK_DDDPRINT("bound function handled, num_stack_args=%ld, idx_func=%ld, curr func=%!T",
495 (long) num_stack_args, (long) idx_func, duk_get_tval(ctx, idx_func)));
496 } while (--sanity > 0);
497
498 if (sanity == 0) {
500 }
501
502 DUK_DDD(DUK_DDDPRINT("final non-bound function is: %!T", duk_get_tval(ctx, idx_func)));
503
504#if defined(DUK_USE_ASSERTIONS)
505 tv_func = duk_require_tval(ctx, idx_func);
507 if (DUK_TVAL_IS_OBJECT(tv_func)) {
508 func = DUK_TVAL_GET_OBJECT(tv_func);
509 DUK_ASSERT(func != NULL);
513 }
514#endif
515
516 /* write back */
517 *p_num_stack_args = num_stack_args;
518}
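/* Example sketch (kept compiled out): the call-time semantics the helper above
 * implements, as seen from script code. Bound arguments from every level of
 * the chain are prepended to the call-time arguments; for constructor calls
 * the bound 'this' is ignored in favor of the newly created object.
 */
#if 0
static void example_bound_chain(duk_context *ctx) {
	duk_eval_string(ctx,
	    "(function () {\n"
	    "    function f(a, b, c) { return a + b + c; }\n"
	    "    var g = f.bind(null, 1).bind(null, 2);\n"
	    "    return g(3);\n"  /* bound args 1 and 2 are prepended -> f(1, 2, 3) */
	    "})()");
	/* -> 6 */
	duk_pop(ctx);
}
#endif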
519
520/*
521 * Helper for setting up var_env and lex_env of an activation,
522 * assuming it does NOT have the DUK_HOBJECT_FLAG_NEWENV flag.
523 */
524
525DUK_LOCAL void duk__handle_oldenv_for_call(duk_hthread *thr,
526 duk_hobject *func,
527 duk_activation *act) {
528 duk_tval *tv;
529
530 DUK_ASSERT(thr != NULL);
531 DUK_ASSERT(func != NULL);
532 DUK_ASSERT(act != NULL);
535
537 if (tv) {
540 act->lex_env = DUK_TVAL_GET_OBJECT(tv);
541
543 if (tv) {
546 act->var_env = DUK_TVAL_GET_OBJECT(tv);
547 } else {
548 act->var_env = act->lex_env;
549 }
550 } else {
552 act->var_env = act->lex_env;
553 }
554
557}
558
559/*
560 * Helper for updating callee 'caller' property.
561 */
562
563#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
564DUK_LOCAL void duk__update_func_caller_prop(duk_hthread *thr, duk_hobject *func) {
565 duk_tval *tv_caller;
566 duk_hobject *h_tmp;
567 duk_activation *act_callee;
568 duk_activation *act_caller;
569
570 DUK_ASSERT(thr != NULL);
571 DUK_ASSERT(func != NULL);
572 DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(func)); /* bound chain resolved */
573 DUK_ASSERT(thr->callstack_top >= 1);
574
575 if (DUK_HOBJECT_HAS_STRICT(func)) {
576 /* Strict functions don't get their 'caller' updated. */
577 return;
578 }
579
580 act_callee = thr->callstack + thr->callstack_top - 1;
581 act_caller = (thr->callstack_top >= 2 ? act_callee - 1 : NULL);
582
583 /* XXX: check .caller writability? */
584
585 /* Backup 'caller' property and update its value. */
587 if (tv_caller) {
588 /* If caller is global/eval code, 'caller' should be set to
589 * 'null'.
590 *
591 * XXX: there is no exotic flag to infer this correctly now.
592 * The NEWENV flag is used now which works as intended for
593 * everything (global code, non-strict eval code, and functions)
594 * except strict eval code. Bound functions are never an issue
595 * because 'func' has been resolved to a non-bound function.
596 */
597
598 if (act_caller) {
599 /* act_caller->func may be NULL in some finalization cases,
600 * just treat like we don't know the caller.
601 */
602 if (act_caller->func && !DUK_HOBJECT_HAS_NEWENV(act_caller->func)) {
603 /* Setting to NULL causes 'caller' to be set to
604 * 'null' as desired.
605 */
606 act_caller = NULL;
607 }
608 }
609
610 if (DUK_TVAL_IS_OBJECT(tv_caller)) {
611 h_tmp = DUK_TVAL_GET_OBJECT(tv_caller);
612 DUK_ASSERT(h_tmp != NULL);
613 act_callee->prev_caller = h_tmp;
614
615 /* Previous value doesn't need refcount changes because its ownership
616 * is transferred to prev_caller.
617 */
618
619 if (act_caller) {
620 DUK_ASSERT(act_caller->func != NULL);
621 DUK_TVAL_SET_OBJECT(tv_caller, act_caller->func);
622 DUK_TVAL_INCREF(thr, tv_caller);
623 } else {
624 DUK_TVAL_SET_NULL(tv_caller); /* no incref */
625 }
626 } else {
627 /* 'caller' must only take on 'null' or function value */
629 DUK_ASSERT(act_callee->prev_caller == NULL);
630 if (act_caller && act_caller->func) {
631 /* Tolerate act_caller->func == NULL which happens in
632 * some finalization cases; treat like unknown caller.
633 */
634 DUK_TVAL_SET_OBJECT(tv_caller, act_caller->func);
635 DUK_TVAL_INCREF(thr, tv_caller);
636 } else {
637 DUK_TVAL_SET_NULL(tv_caller); /* no incref */
638 }
639 }
640 }
641}
642#endif /* DUK_USE_NONSTD_FUNC_CALLER_PROPERTY */
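/* Example sketch (kept compiled out): the effect of the non-standard 'caller'
 * update above, visible from script code when the build option
 * DUK_USE_NONSTD_FUNC_CALLER_PROPERTY is enabled.
 */
#if 0
static void example_caller_property(duk_context *ctx) {
	duk_eval_string(ctx,
	    "(function () {\n"
	    "    function inner() { return inner.caller === outer; }\n"
	    "    function outer() { return inner(); }\n"
	    "    return outer();\n"
	    "})()");
	/* -> true with the option enabled; 'caller' is not maintained otherwise. */
	duk_pop(ctx);
}
#endif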
643
644/*
645 * Determine the effective 'this' binding and coerce the current value
646 * on the valstack to the effective one (in-place, at idx_this).
647 *
648 * The current this value in the valstack (at idx_this) represents either:
649 * - the caller's requested 'this' binding; or
650 * - a 'this' binding accumulated from the bound function chain
651 *
652 * The final 'this' binding for the target function may still be
653 * different, and is determined as described in E5 Section 10.4.3.
654 *
655 * For global and eval code (E5 Sections 10.4.1 and 10.4.2), we assume
656 * that the caller has provided the correct 'this' binding explicitly
657 * when calling, i.e.:
658 *
659 * - global code: this=global object
660 * - direct eval: this=copy from eval() caller's this binding
661 * - other eval: this=global object
662 *
663 * Note: this function may cause a recursive function call with arbitrary
664 * side effects, because ToObject() may be called.
665 */
666
667DUK_LOCAL void duk__coerce_effective_this_binding(duk_hthread *thr,
668 duk_hobject *func,
669 duk_idx_t idx_this) {
670 duk_context *ctx = (duk_context *) thr;
671 duk_tval *tv_this;
672 duk_hobject *obj_global;
673
674 if (func == NULL || DUK_HOBJECT_HAS_STRICT(func)) {
675 /* Lightfuncs are always considered strict. */
676 DUK_DDD(DUK_DDDPRINT("this binding: strict -> use directly"));
677 return;
678 }
679
680 /* XXX: byte offset */
681 tv_this = thr->valstack_bottom + idx_this;
682 switch (DUK_TVAL_GET_TAG(tv_this)) {
683 case DUK_TAG_OBJECT:
684 case DUK_TAG_LIGHTFUNC: /* lightfuncs are treated like objects and not coerced */
685 DUK_DDD(DUK_DDDPRINT("this binding: non-strict, object -> use directly"));
686 break;
688 case DUK_TAG_NULL:
689 DUK_DDD(DUK_DDDPRINT("this binding: non-strict, undefined/null -> use global object"));
690 obj_global = thr->builtins[DUK_BIDX_GLOBAL];
691 /* XXX: avoid this check somehow */
692 if (DUK_LIKELY(obj_global != NULL)) {
693 DUK_ASSERT(!DUK_TVAL_IS_HEAP_ALLOCATED(tv_this)); /* no need to decref previous value */
694 DUK_TVAL_SET_OBJECT(tv_this, obj_global);
695 DUK_HOBJECT_INCREF(thr, obj_global);
696 } else {
697 /* This may only happen if built-ins are being "torn down".
698 * This behavior is out of specification scope.
699 */
700 DUK_D(DUK_DPRINT("this binding: wanted to use global object, but it is NULL -> using undefined instead"));
701 DUK_ASSERT(!DUK_TVAL_IS_HEAP_ALLOCATED(tv_this)); /* no need to decref previous value */
702 DUK_TVAL_SET_UNDEFINED(tv_this); /* nothing to incref */
703 }
704 break;
705 default:
707 DUK_DDD(DUK_DDDPRINT("this binding: non-strict, not object/undefined/null -> use ToObject(value)"));
708 duk_to_object(ctx, idx_this); /* may have side effects */
709 break;
710 }
711}
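/* Example sketch (kept compiled out): the effective 'this' rules implemented
 * above, observed from script code. For a non-strict target an undefined/null
 * 'this' becomes the global object and a primitive is ToObject() coerced; a
 * strict target receives the value as-is.
 */
#if 0
static void example_this_binding(duk_context *ctx) {
	/* Non-strict: undefined 'this' is replaced with the global object. */
	duk_eval_string(ctx, "(function () { return typeof this; })()");
	/* -> 'object' */
	duk_pop(ctx);

	/* Strict: 'this' is used as-is and stays undefined. */
	duk_eval_string(ctx, "(function () { 'use strict'; return typeof this; })()");
	/* -> 'undefined' */
	duk_pop(ctx);

	/* Non-strict: a primitive 'this' is ToObject() coerced. */
	duk_eval_string(ctx, "(function () { return typeof this; }).call(123)");
	/* -> 'object' */
	duk_pop(ctx);
}
#endif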
712
713/*
714 * Shared helper for non-bound func lookup.
715 *
716 * Returns duk_hobject * to the final non-bound function (NULL for lightfunc).
717 */
718
719DUK_LOCAL duk_hobject *duk__nonbound_func_lookup(duk_context *ctx,
720 duk_idx_t idx_func,
721 duk_idx_t *out_num_stack_args,
722 duk_tval **out_tv_func,
723 duk_small_uint_t call_flags) {
724 duk_hthread *thr = (duk_hthread *) ctx;
725 duk_tval *tv_func;
726 duk_hobject *func;
727
728 for (;;) {
729 /* Use loop to minimize code size of relookup after bound function case */
730 tv_func = DUK_GET_TVAL_POSIDX(ctx, idx_func);
731 DUK_ASSERT(tv_func != NULL);
732
733 if (DUK_TVAL_IS_OBJECT(tv_func)) {
734 func = DUK_TVAL_GET_OBJECT(tv_func);
735 if (!DUK_HOBJECT_IS_CALLABLE(func)) {
736 goto not_callable_error;
737 }
738 if (DUK_HOBJECT_HAS_BOUND(func)) {
739 duk__handle_bound_chain_for_call(thr, idx_func, out_num_stack_args, call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL);
740
741 /* The final object may be a normal function or a lightfunc.
742 * We need to re-lookup tv_func because it may have changed
743 * (also value stack may have been resized). Loop again to
744 * do that; we're guaranteed not to come here again.
745 */
748 continue;
749 }
750 } else if (DUK_TVAL_IS_LIGHTFUNC(tv_func)) {
751 func = NULL;
752 } else {
753 goto not_callable_error;
754 }
755 break;
756 }
757
759 DUK_TVAL_IS_LIGHTFUNC(tv_func));
760 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func));
763
764 *out_tv_func = tv_func;
765 return func;
766
767 not_callable_error:
768 DUK_ASSERT(tv_func != NULL);
769#if defined(DUK_USE_PARANOID_ERRORS)
771#else
772 DUK_ERROR_FMT1(thr, DUK_ERR_TYPE_ERROR, "%s not callable", duk_push_string_tval_readable(ctx, tv_func));
773#endif
775 return NULL; /* never executed */
776}
777
778/*
779 * Value stack resize and stack top adjustment helper.
780 *
781 * XXX: This should all be merged to duk_valstack_resize_raw().
782 */
783
784DUK_LOCAL void duk__adjust_valstack_and_top(duk_hthread *thr,
785 duk_idx_t num_stack_args,
786 duk_idx_t idx_args,
787 duk_idx_t nregs,
788 duk_idx_t nargs,
789 duk_hobject *func) {
790 duk_context *ctx = (duk_context *) thr;
791 duk_size_t vs_min_size;
792 duk_bool_t adjusted_top = 0;
793
794 vs_min_size = (thr->valstack_bottom - thr->valstack) + /* bottom of current func */
795 idx_args; /* bottom of new func */
796
797 if (nregs >= 0) {
798 DUK_ASSERT(nargs >= 0);
799 DUK_ASSERT(nregs >= nargs);
800 vs_min_size += nregs;
801 } else {
802 /* 'func' wants stack "as is" */
803 vs_min_size += num_stack_args; /* num entries of new func at entry */
804 }
805 if (func == NULL || DUK_HOBJECT_IS_NATIVEFUNCTION(func)) {
806 vs_min_size += DUK_VALSTACK_API_ENTRY_MINIMUM; /* Duktape/C API guaranteed entries (on top of args) */
807 }
808 vs_min_size += DUK_VALSTACK_INTERNAL_EXTRA; /* + spare */
809
810 /* XXX: We can't resize the value stack to a size smaller than the
811 * current top, so the order of the resize and adjusting the stack
812 * top depends on the current vs. final size of the value stack.
813 * The operations could be combined to avoid this, but the proper
814 * fix is to only grow the value stack on a function call, and only
815 * shrink it (without throwing if the shrink fails) on function
816 * return.
817 */
818
819 if (vs_min_size < (duk_size_t) (thr->valstack_top - thr->valstack)) {
820 DUK_DDD(DUK_DDDPRINT(("final size smaller, set top before resize")));
821
822 DUK_ASSERT(nregs >= 0); /* can't happen when keeping current stack size */
823 duk_set_top(ctx, idx_args + nargs); /* clamp anything above nargs */
824 duk_set_top(ctx, idx_args + nregs); /* extend with undefined */
825 adjusted_top = 1;
826 }
827
829 vs_min_size,
830 DUK_VSRESIZE_FLAG_SHRINK | /* flags */
831 0 /* no compact */ |
833
834 if (!adjusted_top) {
835 if (nregs >= 0) {
836 DUK_ASSERT(nregs >= nargs);
837 duk_set_top(ctx, idx_args + nargs); /* clamp anything above nargs */
838 duk_set_top(ctx, idx_args + nregs); /* extend with undefined */
839 }
840 }
841}
842
843/*
844 * Manipulate value stack so that exactly 'num_stack_rets' return
845 * values are at 'idx_retbase' in every case, assuming there are
846 * 'rc' return values on top of stack.
847 *
848 * This is a bit tricky, because the called C function operates in
849 * the same activation record and may have e.g. popped the stack
850 * empty (below idx_retbase).
851 */
852
853DUK_LOCAL void duk__safe_call_adjust_valstack(duk_hthread *thr, duk_idx_t idx_retbase, duk_idx_t num_stack_rets, duk_idx_t num_actual_rets) {
854 duk_context *ctx = (duk_context *) thr;
855 duk_idx_t idx_rcbase;
856
857 DUK_ASSERT(thr != NULL);
858 DUK_ASSERT(idx_retbase >= 0);
859 DUK_ASSERT(num_stack_rets >= 0);
860 DUK_ASSERT(num_actual_rets >= 0);
861
862 idx_rcbase = duk_get_top(ctx) - num_actual_rets; /* base of known return values */
863
864 DUK_DDD(DUK_DDDPRINT("adjust valstack after func call: "
865 "num_stack_rets=%ld, num_actual_rets=%ld, stack_top=%ld, idx_retbase=%ld, idx_rcbase=%ld",
866 (long) num_stack_rets, (long) num_actual_rets, (long) duk_get_top(ctx),
867 (long) idx_retbase, (long) idx_rcbase));
868
869 DUK_ASSERT(idx_rcbase >= 0); /* caller must check */
870
871 /* Ensure space for final configuration (idx_retbase + num_stack_rets)
872 * and intermediate configurations.
873 */
875 (idx_rcbase > idx_retbase ? idx_rcbase : idx_retbase) +
876 num_stack_rets);
877
878 /* Chop extra retvals away / extend with undefined. */
879 duk_set_top(ctx, idx_rcbase + num_stack_rets);
880
881 if (idx_rcbase >= idx_retbase) {
882 duk_idx_t count = idx_rcbase - idx_retbase;
883 duk_idx_t i;
884
885 DUK_DDD(DUK_DDDPRINT("elements at/after idx_retbase have enough to cover func retvals "
886 "(idx_retbase=%ld, idx_rcbase=%ld)", (long) idx_retbase, (long) idx_rcbase));
887
888 /* nuke values at idx_retbase to get the first retval (initially
889 * at idx_rcbase) to idx_retbase
890 */
891
892 DUK_ASSERT(count >= 0);
893
894 for (i = 0; i < count; i++) {
895 /* XXX: inefficient; block remove primitive */
896 duk_remove(ctx, idx_retbase);
897 }
898 } else {
899 duk_idx_t count = idx_retbase - idx_rcbase;
900 duk_idx_t i;
901
902 DUK_DDD(DUK_DDDPRINT("not enough elements at/after idx_retbase to cover func retvals "
903 "(idx_retbase=%ld, idx_rcbase=%ld)", (long) idx_retbase, (long) idx_rcbase));
904
905 /* insert 'undefined' values at idx_rcbase to get the
906 * return values to idx_retbase
907 */
908
909 DUK_ASSERT(count > 0);
910
911 for (i = 0; i < count; i++) {
912 /* XXX: inefficient; block insert primitive */
914 duk_insert(ctx, idx_rcbase);
915 }
916 }
917}
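/* Example sketch (kept compiled out): the adjustment above is what lets
 * duk_safe_call() always deliver exactly 'nrets' values at idx_retbase,
 * padding with undefined or dropping extras as needed. The Duktape 1.x
 * duk_safe_call() signature (no udata argument) is assumed.
 */
#if 0
static duk_ret_t my_mult(duk_context *ctx) {
	duk_push_int(ctx, duk_get_int(ctx, 0) * duk_get_int(ctx, 1));
	return 1;  /* num_actual_rets == 1 */
}

static void example_safe_call(duk_context *ctx) {
	duk_push_int(ctx, 6);
	duk_push_int(ctx, 7);

	/* [ ... 6 7 ] -> [ ... ret0 ret1 ]: the single actual return value is
	 * padded with one undefined so that exactly nrets == 2 values remain.
	 */
	if (duk_safe_call(ctx, my_mult, 2 /*nargs*/, 2 /*nrets*/) == DUK_EXEC_SUCCESS) {
		long product = (long) duk_get_int(ctx, -2);  /* 42 */
		(void) product;
	}
	duk_pop_2(ctx);  /* two values are present in both success and error cases */
}
#endif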
918
919/*
920 * Misc shared helpers.
921 */
922
923/* Get valstack index for the func argument or throw if insane stack. */
924DUK_LOCAL duk_idx_t duk__get_idx_func(duk_hthread *thr, duk_idx_t num_stack_args) {
925 duk_size_t off_stack_top;
926 duk_size_t off_stack_args;
927 duk_size_t off_stack_all;
928 duk_idx_t idx_func; /* valstack index of 'func' and retval (relative to entry valstack_bottom) */
929
930 /* Argument validation and func/args offset. */
931 off_stack_top = (duk_size_t) ((duk_uint8_t *) thr->valstack_top - (duk_uint8_t *) thr->valstack_bottom);
932 off_stack_args = (duk_size_t) ((duk_size_t) num_stack_args * sizeof(duk_tval));
933 off_stack_all = off_stack_args + 2 * sizeof(duk_tval);
934 if (DUK_UNLIKELY(off_stack_all > off_stack_top)) {
935 /* Since stack indices are not reliable, we can't do anything useful
936 * here. Invoke the existing setjmp catcher, or if it doesn't exist,
937 * call the fatal error handler.
938 */
940 return 0;
941 }
942 idx_func = (duk_idx_t) ((off_stack_top - off_stack_all) / sizeof(duk_tval));
943 return idx_func;
944}
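/* Example: for a call set up as [ ... func this arg1 arg2 ] (num_stack_args
 * == 2), off_stack_all covers the two arguments plus the 'func' and 'this'
 * slots, so idx_func resolves to top - 4. The byte-offset arithmetic above is
 * used so that an insane num_stack_args is detected before it can be used to
 * index the value stack.
 */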
945
946/*
947 * duk_handle_call_protected() and duk_handle_call_unprotected():
948 * call into a Duktape/C or an Ecmascript function from any state.
949 *
950 * Input stack (thr):
951 *
952 * [ func this arg1 ... argN ]
953 *
954 * Output stack (thr):
955 *
956 * [ retval ] (DUK_EXEC_SUCCESS)
957 * [ errobj ] (DUK_EXEC_ERROR (normal error), protected call)
958 *
959 * Even when executing a protected call an error may be thrown in rare cases
960 * such as an insane num_stack_args argument. If there is no catchpoint for
961 * such errors, the fatal error handler is called.
962 *
963 * The error handling path should be error free, even for out-of-memory
964 * errors, to ensure safe sandboxing. (As of Duktape 1.4.0 this is not
965 * yet the case, see XXX notes below.)
966 */
967
968DUK_INTERNAL duk_int_t duk_handle_call_protected(duk_hthread *thr,
969 duk_idx_t num_stack_args,
970 duk_small_uint_t call_flags) {
971 duk_context *ctx;
972 duk_size_t entry_valstack_bottom_index;
973 duk_size_t entry_valstack_end;
974 duk_size_t entry_callstack_top;
975 duk_size_t entry_catchstack_top;
976 duk_int_t entry_call_recursion_depth;
977 duk_hthread *entry_curr_thread;
978 duk_uint_fast8_t entry_thread_state;
979 duk_instr_t **entry_ptr_curr_pc;
980 duk_jmpbuf *old_jmpbuf_ptr = NULL;
981 duk_jmpbuf our_jmpbuf;
982 duk_idx_t idx_func; /* valstack index of 'func' and retval (relative to entry valstack_bottom) */
983
984 /* XXX: Multiple tv_func lookups are now avoided by making a local
985 * copy of tv_func. Another approach would be to compute an offset
986 * for tv_func from valstack bottom and recomputing the tv_func
987 * pointer quickly as valstack + offset instead of calling duk_get_tval().
988 */
989
990 ctx = (duk_context *) thr;
991 DUK_UNREF(ctx);
992 DUK_ASSERT(thr != NULL);
994 DUK_ASSERT(num_stack_args >= 0);
995 /* XXX: currently NULL allocations are not supported; remove if later allowed */
996 DUK_ASSERT(thr->valstack != NULL);
997 DUK_ASSERT(thr->callstack != NULL);
998 DUK_ASSERT(thr->catchstack != NULL);
999
1000 /* Argument validation and func/args offset. */
1001 idx_func = duk__get_idx_func(thr, num_stack_args);
1002
1003 /* Preliminaries, required by setjmp() handler. Must be careful not
1004 * to throw an unintended error here.
1005 */
1006
1007 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
1008#if defined(DUK_USE_PREFER_SIZE)
1009 entry_valstack_end = (duk_size_t) (thr->valstack_end - thr->valstack);
1010#else
1011 DUK_ASSERT((duk_size_t) (thr->valstack_end - thr->valstack) == thr->valstack_size);
1012 entry_valstack_end = thr->valstack_size;
1013#endif
1014 entry_callstack_top = thr->callstack_top;
1015 entry_catchstack_top = thr->catchstack_top;
1016 entry_call_recursion_depth = thr->heap->call_recursion_depth;
1017 entry_curr_thread = thr->heap->curr_thread; /* Note: may be NULL if first call */
1018 entry_thread_state = thr->state;
1019 entry_ptr_curr_pc = thr->ptr_curr_pc; /* may be NULL */
1020
1021 DUK_DD(DUK_DDPRINT("duk_handle_call_protected: thr=%p, num_stack_args=%ld, "
1022 "call_flags=0x%08lx (ignorerec=%ld, constructor=%ld), "
1023 "valstack_top=%ld, idx_func=%ld, idx_args=%ld, rec_depth=%ld/%ld, "
1024 "entry_valstack_bottom_index=%ld, entry_callstack_top=%ld, entry_catchstack_top=%ld, "
1025 "entry_call_recursion_depth=%ld, entry_curr_thread=%p, entry_thread_state=%ld",
1026 (void *) thr,
1027 (long) num_stack_args,
1028 (unsigned long) call_flags,
1029 (long) ((call_flags & DUK_CALL_FLAG_IGNORE_RECLIMIT) != 0 ? 1 : 0),
1030 (long) ((call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL) != 0 ? 1 : 0),
1031 (long) duk_get_top(ctx),
1032 (long) idx_func,
1033 (long) (idx_func + 2),
1034 (long) thr->heap->call_recursion_depth,
1035 (long) thr->heap->call_recursion_limit,
1036 (long) entry_valstack_bottom_index,
1037 (long) entry_callstack_top,
1038 (long) entry_catchstack_top,
1039 (long) entry_call_recursion_depth,
1040 (void *) entry_curr_thread,
1041 (long) entry_thread_state));
1042
1043 old_jmpbuf_ptr = thr->heap->lj.jmpbuf_ptr;
1044 thr->heap->lj.jmpbuf_ptr = &our_jmpbuf;
1045
1046#if defined(DUK_USE_CPP_EXCEPTIONS)
1047 try {
1048#else
1049 DUK_ASSERT(thr->heap->lj.jmpbuf_ptr == &our_jmpbuf);
1050 if (DUK_SETJMP(our_jmpbuf.jb) == 0) {
1051#endif
1052 /* Call handling and success path. Success path exit cleans
1053 * up almost all state.
1054 */
1055 duk__handle_call_inner(thr, num_stack_args, call_flags, idx_func);
1056
1057 /* Success path handles */
1058 DUK_ASSERT(thr->heap->call_recursion_depth == entry_call_recursion_depth);
1059 DUK_ASSERT(thr->ptr_curr_pc == entry_ptr_curr_pc);
1060
1061 /* Longjmp state is kept clean in success path */
1063 DUK_ASSERT(thr->heap->lj.iserror == 0);
1066
1067 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
1068
1069 return DUK_EXEC_SUCCESS;
1070#if defined(DUK_USE_CPP_EXCEPTIONS)
1071 } catch (duk_internal_exception &exc) {
1072#else
1073 } else {
1074#endif
1075 /* Error; error value is in heap->lj.value1. */
1076
1077#if defined(DUK_USE_CPP_EXCEPTIONS)
1078 DUK_UNREF(exc);
1079#endif
1080
1081 duk__handle_call_error(thr,
1082 entry_valstack_bottom_index,
1083 entry_valstack_end,
1084 entry_catchstack_top,
1085 entry_callstack_top,
1086 entry_call_recursion_depth,
1087 entry_curr_thread,
1088 entry_thread_state,
1089 entry_ptr_curr_pc,
1090 idx_func,
1091 old_jmpbuf_ptr);
1092
1093 /* Longjmp state is cleaned up by error handling */
1095 DUK_ASSERT(thr->heap->lj.iserror == 0);
1098 return DUK_EXEC_ERROR;
1099 }
1100#if defined(DUK_USE_CPP_EXCEPTIONS)
1101 catch (std::exception &exc) {
1102 const char *what = exc.what();
1103 if (!what) {
1104 what = "unknown";
1105 }
1106 DUK_D(DUK_DPRINT("unexpected c++ std::exception (perhaps thrown by user code)"));
1107 try {
1108 DUK_ERROR_FMT1(thr, DUK_ERR_API_ERROR, "caught invalid c++ std::exception '%s' (perhaps thrown by user code)", what);
1109 } catch (duk_internal_exception exc) {
1110 DUK_D(DUK_DPRINT("caught api error thrown from unexpected c++ std::exception"));
1111 DUK_UNREF(exc);
1112 duk__handle_call_error(thr,
1113 entry_valstack_bottom_index,
1114 entry_valstack_end,
1115 entry_catchstack_top,
1116 entry_callstack_top,
1117 entry_call_recursion_depth,
1118 entry_curr_thread,
1119 entry_thread_state,
1120 entry_ptr_curr_pc,
1121 idx_func,
1122 old_jmpbuf_ptr);
1123 return DUK_EXEC_ERROR;
1124 }
1125 } catch (...) {
1126 DUK_D(DUK_DPRINT("unexpected c++ exception (perhaps thrown by user code)"));
1127 try {
1128 DUK_ERROR_API(thr, "caught invalid c++ exception (perhaps thrown by user code)");
1129 } catch (duk_internal_exception exc) {
1130 DUK_D(DUK_DPRINT("caught api error thrown from unexpected c++ exception"));
1131 DUK_UNREF(exc);
1132 duk__handle_call_error(thr,
1133 entry_valstack_bottom_index,
1134 entry_valstack_end,
1135 entry_catchstack_top,
1136 entry_callstack_top,
1137 entry_call_recursion_depth,
1138 entry_curr_thread,
1139 entry_thread_state,
1140 entry_ptr_curr_pc,
1141 idx_func,
1142 old_jmpbuf_ptr);
1143 return DUK_EXEC_ERROR;
1144 }
1145 }
1146#endif
1147}
1148
1149DUK_INTERNAL void duk_handle_call_unprotected(duk_hthread *thr,
1150 duk_idx_t num_stack_args,
1151 duk_small_uint_t call_flags) {
1152 duk_idx_t idx_func; /* valstack index of 'func' and retval (relative to entry valstack_bottom) */
1153
1154 /* Argument validation and func/args offset. */
1155 idx_func = duk__get_idx_func(thr, num_stack_args);
1156
1157 duk__handle_call_inner(thr, num_stack_args, call_flags, idx_func);
1158}
1159
1160DUK_LOCAL void duk__handle_call_inner(duk_hthread *thr,
1161 duk_idx_t num_stack_args,
1162 duk_small_uint_t call_flags,
1163 duk_idx_t idx_func) {
1164 duk_context *ctx;
1165 duk_size_t entry_valstack_bottom_index;
1166 duk_size_t entry_valstack_end;
1167 duk_size_t entry_callstack_top;
1168 duk_size_t entry_catchstack_top;
1169 duk_int_t entry_call_recursion_depth;
1170 duk_hthread *entry_curr_thread;
1171 duk_uint_fast8_t entry_thread_state;
1172 duk_instr_t **entry_ptr_curr_pc;
1173 duk_idx_t nargs; /* # argument registers target function wants (< 0 => "as is") */
1174 duk_idx_t nregs; /* # total registers target function wants on entry (< 0 => "as is") */
1175 duk_hobject *func; /* 'func' on stack (borrowed reference) */
1176 duk_tval *tv_func; /* duk_tval ptr for 'func' on stack (borrowed reference) or tv_func_copy */
1177 duk_tval tv_func_copy; /* to avoid relookups */
1178 duk_activation *act;
1179 duk_hobject *env;
1180 duk_ret_t rc;
1181
1182 ctx = (duk_context *) thr;
1183 DUK_ASSERT(thr != NULL);
1185 DUK_ASSERT(ctx != NULL);
1186 DUK_ASSERT(num_stack_args >= 0);
1187 /* XXX: currently NULL allocations are not supported; remove if later allowed */
1188 DUK_ASSERT(thr->valstack != NULL);
1189 DUK_ASSERT(thr->callstack != NULL);
1190 DUK_ASSERT(thr->catchstack != NULL);
1191
1192 DUK_DD(DUK_DDPRINT("duk__handle_call_inner: num_stack_args=%ld, call_flags=0x%08lx, top=%ld",
1193 (long) num_stack_args, (long) call_flags, (long) duk_get_top(ctx)));
1194
1195 /*
1196 * Store entry state.
1197 */
1198
1199 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
1200#if defined(DUK_USE_PREFER_SIZE)
1201 entry_valstack_end = (duk_size_t) (thr->valstack_end - thr->valstack);
1202#else
1203 DUK_ASSERT((duk_size_t) (thr->valstack_end - thr->valstack) == thr->valstack_size);
1204 entry_valstack_end = thr->valstack_size;
1205#endif
1206 entry_callstack_top = thr->callstack_top;
1207 entry_catchstack_top = thr->catchstack_top;
1208 entry_call_recursion_depth = thr->heap->call_recursion_depth;
1209 entry_curr_thread = thr->heap->curr_thread; /* Note: may be NULL if first call */
1210 entry_thread_state = thr->state;
1211 entry_ptr_curr_pc = thr->ptr_curr_pc; /* may be NULL */
1212
1213 /* If thr->ptr_curr_pc is set, sync curr_pc to act->pc. Then NULL
1214 * thr->ptr_curr_pc so that it's not accidentally used with an incorrect
1215 * activation when side effects occur.
1216 */
1218
1219 DUK_DD(DUK_DDPRINT("duk__handle_call_inner: thr=%p, num_stack_args=%ld, "
1220 "call_flags=0x%08lx (ignorerec=%ld, constructor=%ld), "
1221 "valstack_top=%ld, idx_func=%ld, idx_args=%ld, rec_depth=%ld/%ld, "
1222 "entry_valstack_bottom_index=%ld, entry_callstack_top=%ld, entry_catchstack_top=%ld, "
1223 "entry_call_recursion_depth=%ld, entry_curr_thread=%p, entry_thread_state=%ld",
1224 (void *) thr,
1225 (long) num_stack_args,
1226 (unsigned long) call_flags,
1227 (long) ((call_flags & DUK_CALL_FLAG_IGNORE_RECLIMIT) != 0 ? 1 : 0),
1228 (long) ((call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL) != 0 ? 1 : 0),
1229 (long) duk_get_top(ctx),
1230 (long) idx_func,
1231 (long) (idx_func + 2),
1232 (long) thr->heap->call_recursion_depth,
1233 (long) thr->heap->call_recursion_limit,
1234 (long) entry_valstack_bottom_index,
1235 (long) entry_callstack_top,
1236 (long) entry_catchstack_top,
1237 (long) entry_call_recursion_depth,
1238 (void *) entry_curr_thread,
1239 (long) entry_thread_state));
1240
1241
1242 /*
1243 * Thread state check and book-keeping.
1244 */
1245
1246 if (thr == thr->heap->curr_thread) {
1247 /* same thread */
1248 if (thr->state != DUK_HTHREAD_STATE_RUNNING) {
1249 /* should actually never happen, but check anyway */
1250 goto thread_state_error;
1251 }
1252 } else {
1253 /* different thread */
1254 DUK_ASSERT(thr->heap->curr_thread == NULL ||
1256 if (thr->state != DUK_HTHREAD_STATE_INACTIVE) {
1257 goto thread_state_error;
1258 }
1259 DUK_HEAP_SWITCH_THREAD(thr->heap, thr);
1261
1262 /* Note: multiple threads may be simultaneously in the RUNNING
1263 * state, but not in the same "resume chain".
1264 */
1265 }
1266 DUK_ASSERT(thr->heap->curr_thread == thr);
1268
1269 /*
1270 * C call recursion depth check, which provides a reasonable upper
1271 * bound on maximum C stack size (arbitrary C stack growth is only
1272 * possible by recursive handle_call / handle_safe_call calls).
1273 */
1274
1275 /* XXX: remove DUK_CALL_FLAG_IGNORE_RECLIMIT flag: there's now the
1276 * reclimit bump?
1277 */
1278
1281 if (call_flags & DUK_CALL_FLAG_IGNORE_RECLIMIT) {
1282 DUK_DD(DUK_DDPRINT("ignoring reclimit for this call (probably an errhandler call)"));
1283 } else {
1285 /* XXX: error message is a bit misleading: we reached a recursion
1286 * limit which is also essentially the same as a C callstack limit
1287 * (except perhaps with some relaxed threading assumptions).
1288 */
1290 }
1291 thr->heap->call_recursion_depth++;
1292 }
1293
1294 /*
1295 * Check the function type, handle bound function chains, and prepare
1296 * parameters for the rest of the call handling. Also figure out the
1297 * effective 'this' binding, which replaces the current value at
1298 * idx_func + 1.
1299 *
1300 * If the target function is a 'bound' one, follow the chain of 'bound'
1301 * functions until a non-bound function is found. During this process,
1302 * bound arguments are 'prepended' to existing ones, and the "this"
1303 * binding is overridden. See E5 Section 15.3.4.5.1.
1304 *
1305 * Lightfunc detection happens here too. Note that lightweight functions
1306 * can be wrapped by (non-lightweight) bound functions so we must resolve
1307 * the bound function chain first.
1308 */
1309
1310 func = duk__nonbound_func_lookup(ctx, idx_func, &num_stack_args, &tv_func, call_flags);
1311 DUK_TVAL_SET_TVAL(&tv_func_copy, tv_func);
1312 tv_func = &tv_func_copy; /* local copy to avoid relookups */
1313
1314 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func));
1317
1318 duk__coerce_effective_this_binding(thr, func, idx_func + 1);
1319 DUK_DDD(DUK_DDDPRINT("effective 'this' binding is: %!T",
1320 (duk_tval *) duk_get_tval(ctx, idx_func + 1)));
1321
1322 /* [ ... func this arg1 ... argN ] */
1323
1324 /*
1325 * Setup a preliminary activation and figure out nargs/nregs.
1326 *
1327 * Don't touch valstack_bottom or valstack_top yet so that Duktape API
1328 * calls work normally.
1329 */
1330
1332
1333 if (thr->callstack_top > 0) {
1334 /*
1335 * Update idx_retval of current activation.
1336 *
1337 * Although it might seem this is not necessary (bytecode executor
1338 * does this for Ecmascript-to-Ecmascript calls; other calls are
1339 * handled here), this turns out to be necessary for handling yield
1340 * and resume. For them, an Ecmascript-to-native call happens, and
1341 * the Ecmascript call's idx_retval must be set for things to work.
1342 */
1343
1344 (thr->callstack + thr->callstack_top - 1)->idx_retval = entry_valstack_bottom_index + idx_func;
1345 }
1346
1348 act = thr->callstack + thr->callstack_top;
1349 thr->callstack_top++;
1351 DUK_ASSERT(thr->valstack_top > thr->valstack_bottom); /* at least effective 'this' */
1352 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func));
1353
1354 act->flags = 0;
1355
1356 /* For now all calls except Ecma-to-Ecma calls prevent a yield. */
1358 if (call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL) {
1360 }
1361 if (call_flags & DUK_CALL_FLAG_DIRECT_EVAL) {
1363 }
1364
1365 /* These base values are never used, but if the compiler doesn't know
1366 * that DUK_ERROR() won't return, these are needed to silence warnings.
1367 * On the other hand, scan-build will warn about the values not being
1368 * used, so add a DUK_UNREF.
1369 */
1370 nargs = 0; DUK_UNREF(nargs);
1371 nregs = 0; DUK_UNREF(nregs);
1372
1373 if (DUK_LIKELY(func != NULL)) {
1374 if (DUK_HOBJECT_HAS_STRICT(func)) {
1375 act->flags |= DUK_ACT_FLAG_STRICT;
1376 }
1378 nargs = ((duk_hcompiledfunction *) func)->nargs;
1379 nregs = ((duk_hcompiledfunction *) func)->nregs;
1380 DUK_ASSERT(nregs >= nargs);
1381 } else if (DUK_HOBJECT_IS_NATIVEFUNCTION(func)) {
1382 /* Note: nargs (and nregs) may be negative for a native
1383 * function, which indicates that the function wants the
1384 * input stack "as is" (i.e. handles "vararg" arguments).
1385 */
1386 nargs = ((duk_hnativefunction *) func)->nargs;
1387 nregs = nargs;
1388 } else {
1389 /* XXX: this should be an assert */
1391 }
1392 } else {
1393 duk_small_uint_t lf_flags;
1394
1396 lf_flags = DUK_TVAL_GET_LIGHTFUNC_FLAGS(tv_func);
1397 nargs = DUK_LFUNC_FLAGS_GET_NARGS(lf_flags);
1398 if (nargs == DUK_LFUNC_NARGS_VARARGS) {
1399 nargs = -1; /* vararg */
1400 }
1401 nregs = nargs;
1402
1403 act->flags |= DUK_ACT_FLAG_STRICT;
1404 }
1405
1406 act->func = func; /* NULL for lightfunc */
1407 act->var_env = NULL;
1408 act->lex_env = NULL;
1409#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
1410 act->prev_caller = NULL;
1411#endif
1412 act->curr_pc = NULL;
1413#if defined(DUK_USE_DEBUGGER_SUPPORT)
1414 act->prev_line = 0;
1415#endif
1416 act->idx_bottom = entry_valstack_bottom_index + idx_func + 2;
1417#if 0 /* topmost activation idx_retval is considered garbage, no need to init */
1418 act->idx_retval = 0;
1419#endif
1420 DUK_TVAL_SET_TVAL(&act->tv_func, tv_func); /* borrowed, no refcount */
1421
1422 /* XXX: remove the preventcount and make yield walk the callstack?
1423 * Or perhaps just use a single flag, not a counter, faster to just
1424 * set and restore?
1425 */
1426 if (act->flags & DUK_ACT_FLAG_PREVENT_YIELD) {
1427 /* duk_hthread_callstack_unwind() will decrease this on unwind */
1429 }
1430
1431 /* XXX: Is this INCREF necessary? 'func' is always a borrowed
1432 * reference reachable through the value stack? If changed, stack
1433 * unwind code also needs to be fixed to match.
1434 */
1435 DUK_HOBJECT_INCREF_ALLOWNULL(thr, func); /* act->func */
1436
1437#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
1438 if (func) {
1439 duk__update_func_caller_prop(thr, func);
1440 act = thr->callstack + thr->callstack_top - 1;
1441 }
1442#endif
1443
1444 /* [ ... func this arg1 ... argN ] */
1445
1446 /*
1447 * Environment record creation and 'arguments' object creation.
1448 * Named function expression name binding is handled by the
1449 * compiler; the compiled function's parent env will contain
1450 * the (immutable) binding already.
1451 *
1452 * This handling is now identical for C and Ecmascript functions.
1453 * C functions always have the 'NEWENV' flag set, so their
1454 * environment record initialization is delayed (which is good).
1455 *
1456 * Delayed creation (on demand) is handled in duk_js_var.c.
1457 */
1458
1459 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func)); /* bound function chain has already been resolved */
1460
1461 if (DUK_LIKELY(func != NULL)) {
1464 /* Use a new environment but there's no 'arguments' object;
1465 * delayed environment initialization. This is the most
1466 * common case.
1467 */
1468 DUK_ASSERT(act->lex_env == NULL);
1469 DUK_ASSERT(act->var_env == NULL);
1470 } else {
1471 /* Use a new environment and there's an 'arguments' object.
1472 * We need to initialize it right now.
1473 */
1474
1475 /* third arg: absolute index (to entire valstack) of idx_bottom of new activation */
1477 DUK_ASSERT(env != NULL);
1478
1479 /* [ ... func this arg1 ... argN envobj ] */
1480
1482 duk__handle_createargs_for_call(thr, func, env, num_stack_args);
1483
1484 /* [ ... func this arg1 ... argN envobj ] */
1485
1486 act = thr->callstack + thr->callstack_top - 1;
1487 act->lex_env = env;
1488 act->var_env = env;
1489 DUK_HOBJECT_INCREF(thr, env);
1490 DUK_HOBJECT_INCREF(thr, env); /* XXX: incref by count (2) directly */
1491 duk_pop(ctx);
1492 }
1493 } else {
1494 /* Use existing env (e.g. for non-strict eval); cannot have
1495 * an own 'arguments' object (but can refer to an existing one).
1496 */
1497
1499
1500 duk__handle_oldenv_for_call(thr, func, act);
1501 /* No need to re-lookup 'act' at present: no side effects. */
1502
1503 DUK_ASSERT(act->lex_env != NULL);
1504 DUK_ASSERT(act->var_env != NULL);
1505 }
1506 } else {
1507 /* Lightfuncs are always native functions and have "newenv". */
1508 DUK_ASSERT(act->lex_env == NULL);
1509 DUK_ASSERT(act->var_env == NULL);
1510 }
1511
1512 /* [ ... func this arg1 ... argN ] */
1513
1514 /*
1515 * Setup value stack: clamp to 'nargs', fill up to 'nregs'
1516 *
1517 * Value stack may either grow or shrink, depending on the
1518 * number of func registers and the number of actual arguments.
1519 * If nregs >= 0, func wants args clamped to 'nargs'; else it
1520 * wants all args (= 'num_stack_args').
1521 */
1522
1523 /* XXX: optimize value stack operation */
1524 /* XXX: don't want to shrink allocation here */
1525
1526 duk__adjust_valstack_and_top(thr,
1527 num_stack_args,
1528 idx_func + 2,
1529 nregs,
1530 nargs,
1531 func);
1532
1533 /*
1534 * Determine call type, then finalize activation, shift to
1535 * new value stack bottom, and call the target.
1536 */
1537
1538 act = thr->callstack + thr->callstack_top - 1;
1539 if (func != NULL && DUK_HOBJECT_IS_COMPILEDFUNCTION(func)) {
1540 /*
1541 * Ecmascript call
1542 */
1543
1544 duk_tval *tv_ret;
1545 duk_tval *tv_funret;
1546
1547 DUK_ASSERT(func != NULL);
1550
1551 thr->valstack_bottom = thr->valstack_bottom + idx_func + 2;
1552 /* keep current valstack_top */
1553 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1555 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1556
1557 /* [ ... func this | arg1 ... argN ] ('this' must precede new bottom) */
1558
1559 /*
1560 * Bytecode executor call.
1561 *
1562 * Execute bytecode, handling any recursive function calls and
1563 * thread resumptions. Returns when execution would return from
1564 * the entry level activation. When the executor returns, a
1565 * single return value is left on the stack top.
1566 *
1567 * The only possible longjmp() is an error (DUK_LJ_TYPE_THROW),
1568 * other types are handled internally by the executor.
1569 */
1570
1571 /* thr->ptr_curr_pc is set by bytecode executor early on entry */
1572 DUK_ASSERT(thr->ptr_curr_pc == NULL);
1573 DUK_DDD(DUK_DDDPRINT("entering bytecode execution"));
1574 duk_js_execute_bytecode(thr);
1575 DUK_DDD(DUK_DDDPRINT("returned from bytecode execution"));
1576
1577 /* Unwind. */
1578
1579 DUK_ASSERT(thr->catchstack_top >= entry_catchstack_top); /* may need unwind */
1580 DUK_ASSERT(thr->callstack_top == entry_callstack_top + 1);
1581 DUK_ASSERT(thr->callstack_top == entry_callstack_top + 1);
1582 duk_hthread_catchstack_unwind(thr, entry_catchstack_top);
1584 duk_hthread_callstack_unwind(thr, entry_callstack_top);
1586
1587 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
1588 /* keep current valstack_top */
1589 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1591 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1592 DUK_ASSERT(thr->valstack_top - thr->valstack_bottom >= idx_func + 1);
1593
1594 /* Return value handling. */
1595
1596 /* [ ... func this (crud) retval ] */
1597
1598 tv_ret = thr->valstack_bottom + idx_func;
1599 tv_funret = thr->valstack_top - 1;
1600#if defined(DUK_USE_FASTINT)
1601 /* Explicit check for fastint downgrade. */
1602 DUK_TVAL_CHKFAST_INPLACE(tv_funret);
1603#endif
1604 DUK_TVAL_SET_TVAL_UPDREF(thr, tv_ret, tv_funret); /* side effects */
1605 } else {
1606 /*
1607 * Native call.
1608 */
1609
1610 duk_tval *tv_ret;
1611 duk_tval *tv_funret;
1612
1613 thr->valstack_bottom = thr->valstack_bottom + idx_func + 2;
1614 /* keep current valstack_top */
1615 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1617 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1618 DUK_ASSERT(func == NULL || ((duk_hnativefunction *) func)->func != NULL);
1619
1620 /* [ ... func this | arg1 ... argN ] ('this' must precede new bottom) */
1621
1622 /* For native calls must be NULL so we don't sync back */
1623 DUK_ASSERT(thr->ptr_curr_pc == NULL);
1624
1625 if (func) {
1626 rc = ((duk_hnativefunction *) func)->func((duk_context *) thr);
1627 } else {
1628 duk_c_function funcptr = DUK_TVAL_GET_LIGHTFUNC_FUNCPTR(tv_func);
1629 rc = funcptr((duk_context *) thr);
1630 }
1631
1632 /* Automatic error throwing, retval check. */
1633
1634 if (rc < 0) {
1637 } else if (rc > 1) {
1638 DUK_ERROR_API(thr, "c function returned invalid rc");
1639 }
1640 DUK_ASSERT(rc == 0 || rc == 1);
1641
1642 /* Unwind. */
1643
1644 DUK_ASSERT(thr->catchstack_top == entry_catchstack_top); /* no need to unwind */
1645 DUK_ASSERT(thr->callstack_top == entry_callstack_top + 1);
1646 duk_hthread_callstack_unwind(thr, entry_callstack_top);
1648
1649 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
1650 /* keep current valstack_top */
1651 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1653 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1654 DUK_ASSERT(thr->valstack_top - thr->valstack_bottom >= idx_func + 1);
1655
1656 /* Return value handling. */
1657
1658 /* XXX: should this happen in the callee's activation or after unwinding? */
1659 tv_ret = thr->valstack_bottom + idx_func;
1660 if (rc == 0) {
1661 DUK_TVAL_SET_UNDEFINED_UPDREF(thr, tv_ret); /* side effects */
1662 } else {
1663 /* [ ... func this (crud) retval ] */
1664 tv_funret = thr->valstack_top - 1;
1665#if defined(DUK_USE_FASTINT)
1666 /* Explicit check for fastint downgrade. */
1667 DUK_TVAL_CHKFAST_INPLACE(tv_funret);
1668#endif
1669 DUK_TVAL_SET_TVAL_UPDREF(thr, tv_ret, tv_funret); /* side effects */
1670 }
1671 }
1672
1673 duk_set_top(ctx, idx_func + 1); /* XXX: unnecessary, handle in adjust */
1674
1675 /* [ ... retval ] */
1676
1677 /* Ensure there is internal valstack spare before we exit; this may
1678 * throw an alloc error. The same guaranteed size must be available
1679 * as before the call. This is not optimal now: we store the valstack
1680 * allocated size during entry; this value may be higher than the
1681 * minimal guarantee for an application.
1682 */
1683
1684 /* XXX: we should never shrink here; when we error out later, we'd
1685 * need to potentially grow the value stack in error unwind which could
1686 * cause another error.
1687 */
1688
1689 (void) duk_valstack_resize_raw((duk_context *) thr,
1690 entry_valstack_end, /* same as during entry */
1691 DUK_VSRESIZE_FLAG_SHRINK | /* flags */
1694
1695 /* Restore entry thread executor curr_pc stack frame pointer. */
1696 thr->ptr_curr_pc = entry_ptr_curr_pc;
1697
1698 DUK_HEAP_SWITCH_THREAD(thr->heap, entry_curr_thread); /* may be NULL */
1699 thr->state = (duk_uint8_t) entry_thread_state;
1700
1701 DUK_ASSERT((thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread == NULL) || /* first call */
1702 (thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread != NULL) || /* other call */
1703 (thr->state == DUK_HTHREAD_STATE_RUNNING && thr->heap->curr_thread == thr)); /* current thread */
1704
1705 thr->heap->call_recursion_depth = entry_call_recursion_depth;
1706
1707 /* If the debugger is active we need to force an interrupt so that
1708 * debugger breakpoints are rechecked. This is important for function
1709 * calls caused by side effects (e.g. when doing a DUK_OP_GETPROP), see
1710 * GH-303. Only needed for success path, error path always causes a
1711 * breakpoint recheck in the executor. It would be enough to set this
1712 * only when returning to an Ecmascript activation, but setting the flag
1713 * on every return should have no ill effect.
1714 */
1715#if defined(DUK_USE_DEBUGGER_SUPPORT)
1716 if (DUK_HEAP_IS_DEBUGGER_ATTACHED(thr->heap)) {
1717 DUK_DD(DUK_DDPRINT("returning with debugger enabled, force interrupt"));
1718 DUK_ASSERT(thr->interrupt_counter <= thr->interrupt_init);
1719 thr->interrupt_init -= thr->interrupt_counter;
1720 thr->interrupt_counter = 0;
1721 thr->heap->dbg_force_restart = 1;
1722 }
1723#endif
1724
1725#if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
1726 duk__interrupt_fixup(thr, entry_curr_thread);
1727#endif
1728
1729 return;
1730
1731 thread_state_error:
1732 DUK_ERROR_FMT1(thr, DUK_ERR_TYPE_ERROR, "invalid thread state for call (%ld)", (long) thr->state);
1734 return; /* never executed */
1735}
1736
1737DUK_LOCAL void duk__handle_call_error(duk_hthread *thr,
1738 duk_size_t entry_valstack_bottom_index,
1739 duk_size_t entry_valstack_end,
1740 duk_size_t entry_catchstack_top,
1741 duk_size_t entry_callstack_top,
1742 duk_int_t entry_call_recursion_depth,
1743 duk_hthread *entry_curr_thread,
1744 duk_uint_fast8_t entry_thread_state,
1745 duk_instr_t **entry_ptr_curr_pc,
1746 duk_idx_t idx_func,
1747 duk_jmpbuf *old_jmpbuf_ptr) {
1748 duk_context *ctx;
1749 duk_tval *tv_ret;
1750
1751 ctx = (duk_context *) thr;
1752
1753 DUK_DDD(DUK_DDDPRINT("error caught during duk__handle_call_inner(): %!T",
1754 (duk_tval *) &thr->heap->lj.value1));
1755
1756 /* Other longjmp types are handled by executor before propagating
1757 * the error here.
1758 */
1760 DUK_ASSERT(thr->callstack_top >= entry_callstack_top);
1761 DUK_ASSERT(thr->catchstack_top >= entry_catchstack_top);
1762
1763 /* We don't need to sync back thr->ptr_curr_pc here because
1764 * the bytecode executor always has a setjmp catchpoint which
1765 * does that before errors propagate to here.
1766 */
1767 DUK_ASSERT(thr->ptr_curr_pc == NULL);
1768
1769 /* Restore the previous setjmp catcher so that any error in
1770 * error handling will propagate outwards rather than re-enter
1771 * the same handler. However, the error handling path must be
1772 * designed to be error free so that sandboxing guarantees are
1773 * reliable, see e.g. https://github.com/svaarala/duktape/issues/476.
1774 */
1775 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
1776
1777 /* XXX: callstack unwind may now throw an error when closing
1778 * scopes; this is a sandboxing issue, described in:
1779 * https://github.com/svaarala/duktape/issues/476
1780 */
1781 duk_hthread_catchstack_unwind(thr, entry_catchstack_top);
1782 duk_hthread_catchstack_shrink_check(thr);
1783 duk_hthread_callstack_unwind(thr, entry_callstack_top);
1784 duk_hthread_callstack_shrink_check(thr);
1785
1786 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
1787 tv_ret = thr->valstack_bottom + idx_func; /* XXX: byte offset? */
1788 DUK_TVAL_SET_TVAL_UPDREF(thr, tv_ret, &thr->heap->lj.value1); /* side effects */
1789#if defined(DUK_USE_FASTINT)
1790 /* Explicit check for fastint downgrade. */
1792#endif
1793 duk_set_top(ctx, idx_func + 1); /* XXX: could be eliminated with valstack adjust */
1794
1795 /* [ ... errobj ] */
1796
1797 /* Ensure there is internal valstack spare before we exit; this may
1798 * throw an alloc error. The same guaranteed size must be available
1799 * as before the call. This is not optimal now: we store the valstack
1800 * allocated size during entry; this value may be higher than the
1801 * minimal guarantee for an application.
1802 */
1803
1804 /* XXX: this needs to be reworked so that we never shrink the value
1805 * stack on function entry so that we never need to grow it here.
1806 * Needing to grow here is a sandboxing issue because we need to
1807 * allocate which may cause an error in the error handling path
1808 * and thus propagate an error out of a protected call.
1809 */
1810
1811 (void) duk_valstack_resize_raw((duk_context *) thr,
1812 entry_valstack_end, /* same as during entry */
1813 DUK_VSRESIZE_FLAG_SHRINK | /* flags */
1814 DUK_VSRESIZE_FLAG_COMPACT |
1815 DUK_VSRESIZE_FLAG_THROW);
1816
1817
1818 /* These are just convenience "wiping" of state. Side effects should
1819 * not be an issue here: thr->heap and thr->heap->lj have a stable
1820 * pointer. Finalizer runs etc capture even out-of-memory errors so
1821 * nothing should throw here.
1822 */
1824 thr->heap->lj.iserror = 0;
1825 DUK_TVAL_SET_UNDEFINED_UPDREF(thr, &thr->heap->lj.value1); /* side effects */
1826 DUK_TVAL_SET_UNDEFINED_UPDREF(thr, &thr->heap->lj.value2); /* side effects */
1827
1828 /* Restore entry thread executor curr_pc stack frame pointer. */
1829 thr->ptr_curr_pc = entry_ptr_curr_pc;
1830
1831 DUK_HEAP_SWITCH_THREAD(thr->heap, entry_curr_thread); /* may be NULL */
1832 thr->state = (duk_uint8_t) entry_thread_state;
1833
1834 DUK_ASSERT((thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread == NULL) || /* first call */
1835 (thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread != NULL) || /* other call */
1836 (thr->state == DUK_HTHREAD_STATE_RUNNING && thr->heap->curr_thread == thr)); /* current thread */
1837
1838 thr->heap->call_recursion_depth = entry_call_recursion_depth;
1839
1840 /* If the debugger is active we need to force an interrupt so that
1841 * debugger breakpoints are rechecked. This is important for function
1842 * calls caused by side effects (e.g. when doing a DUK_OP_GETPROP), see
1843 * GH-303. This is only needed for the success path; the error path always
1844 * causes a breakpoint recheck in the executor. It would be enough to set this
1845 * only when returning to an Ecmascript activation, but setting the flag
1846 * on every return should have no ill effect.
1847 */
1848#if defined(DUK_USE_DEBUGGER_SUPPORT)
1849 if (DUK_HEAP_IS_DEBUGGER_ATTACHED(thr->heap)) {
1850 DUK_DD(DUK_DDPRINT("returning with debugger enabled, force interrupt"));
1851 DUK_ASSERT(thr->interrupt_counter <= thr->interrupt_init);
1852 thr->interrupt_init -= thr->interrupt_counter;
1853 thr->interrupt_counter = 0;
1854 thr->heap->dbg_force_restart = 1;
1855 }
1856#endif
1857
1858#if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
1859 duk__interrupt_fixup(thr, entry_curr_thread);
1860#endif
1861}
1862
1863/*
1864 * duk_handle_safe_call(): make a "C protected call" within the
1865 * current activation.
1866 *
1867 * The allowed thread states for making a call are the same as for
1868 * duk_handle_call_xxx().
1869 *
1870 * Error handling is similar to duk_handle_call_xxx(); errors may be thrown
1871 * (and result in a fatal error) for insane arguments.
1872 */
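/* Illustration (hedged sketch, not from duk_js_call.c): user code reaches
 * duk_handle_safe_call() through the public Duktape 1.x duk_safe_call()
 * wrapper. The helper names below (my_div, call_it) are made up for the
 * example; it assumes #include <stdio.h> and "duktape.h".
 */
#if 0
static duk_ret_t my_div(duk_context *ctx) {
	/* Runs inside the caller's activation: the two arguments pushed by the
	 * caller are on the value stack top.
	 */
	double a = duk_require_number(ctx, -2);
	double b = duk_require_number(ctx, -1);
	if (b == 0.0) {
		return DUK_RET_RANGE_ERROR;  /* negative rc -> thrown, caught by the safe call */
	}
	duk_push_number(ctx, a / b);
	return 1;  /* one result value left on top */
}

static void call_it(duk_context *ctx) {
	duk_push_number(ctx, 10.0);
	duk_push_number(ctx, 5.0);
	if (duk_safe_call(ctx, my_div, 2 /*nargs*/, 1 /*nrets*/) == DUK_EXEC_SUCCESS) {
		printf("result: %g\n", (double) duk_get_number(ctx, -1));
	} else {
		printf("error: %s\n", duk_safe_to_string(ctx, -1));
	}
	duk_pop(ctx);  /* single result/error slot */
}
#endif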
1873
1874/* XXX: bump preventcount by one for the duration of this call? */
1875
1876 DUK_INTERNAL duk_int_t duk_handle_safe_call(duk_hthread *thr,
1877 duk_safe_call_function func,
1878 duk_idx_t num_stack_args,
1879 duk_idx_t num_stack_rets) {
1880 duk_context *ctx = (duk_context *) thr;
1881 duk_size_t entry_valstack_bottom_index;
1882 duk_size_t entry_callstack_top;
1883 duk_size_t entry_catchstack_top;
1884 duk_int_t entry_call_recursion_depth;
1885 duk_hthread *entry_curr_thread;
1886 duk_uint_fast8_t entry_thread_state;
1887 duk_instr_t **entry_ptr_curr_pc;
1888 duk_jmpbuf *old_jmpbuf_ptr = NULL;
1889 duk_jmpbuf our_jmpbuf;
1890 duk_idx_t idx_retbase;
1891 duk_int_t retval;
1892
1893 DUK_ASSERT(thr != NULL);
1894 DUK_ASSERT(ctx != NULL);
1895
1896 /* Note: careful with indices like '-x'; if 'x' is zero, it refers to bottom */
1897 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
1898 entry_callstack_top = thr->callstack_top;
1899 entry_catchstack_top = thr->catchstack_top;
1900 entry_call_recursion_depth = thr->heap->call_recursion_depth;
1901 entry_curr_thread = thr->heap->curr_thread; /* Note: may be NULL if first call */
1902 entry_thread_state = thr->state;
1903 entry_ptr_curr_pc = thr->ptr_curr_pc; /* may be NULL */
1904 idx_retbase = duk_get_top(ctx) - num_stack_args; /* Note: not a valid stack index if num_stack_args == 0 */
1905
1906 /* Note: cannot portably debug print a function pointer, hence 'func' not printed! */
1907 DUK_DD(DUK_DDPRINT("duk_handle_safe_call: thr=%p, num_stack_args=%ld, num_stack_rets=%ld, "
1908 "valstack_top=%ld, idx_retbase=%ld, rec_depth=%ld/%ld, "
1909 "entry_valstack_bottom_index=%ld, entry_callstack_top=%ld, entry_catchstack_top=%ld, "
1910 "entry_call_recursion_depth=%ld, entry_curr_thread=%p, entry_thread_state=%ld",
1911 (void *) thr,
1912 (long) num_stack_args,
1913 (long) num_stack_rets,
1914 (long) duk_get_top(ctx),
1915 (long) idx_retbase,
1916 (long) thr->heap->call_recursion_depth,
1917 (long) thr->heap->call_recursion_limit,
1918 (long) entry_valstack_bottom_index,
1919 (long) entry_callstack_top,
1920 (long) entry_catchstack_top,
1921 (long) entry_call_recursion_depth,
1922 (void *) entry_curr_thread,
1923 (long) entry_thread_state));
1924
1925 if (idx_retbase < 0) {
1926 /* Since stack indices are not reliable, we can't do anything useful
1927 * here. Invoke the existing setjmp catcher, or if it doesn't exist,
1928 * call the fatal error handler.
1929 */
1930
1931 DUK_ERROR_API(thr, DUK_STR_INVALID_CALL_ARGS);
1932 }
1933
1934 /* setjmp catchpoint setup */
1935
1936 old_jmpbuf_ptr = thr->heap->lj.jmpbuf_ptr;
1937 thr->heap->lj.jmpbuf_ptr = &our_jmpbuf;
1938
1939#if defined(DUK_USE_CPP_EXCEPTIONS)
1940 try {
1941#else
1942 DUK_ASSERT(thr->heap->lj.jmpbuf_ptr == &our_jmpbuf);
1943 if (DUK_SETJMP(our_jmpbuf.jb) == 0) {
1944 /* Success path. */
1945#endif
1946 DUK_DDD(DUK_DDDPRINT("safe_call setjmp catchpoint setup complete"));
1947
1948 duk__handle_safe_call_inner(thr,
1949 func,
1950 idx_retbase,
1951 num_stack_rets,
1952 entry_valstack_bottom_index,
1953 entry_callstack_top,
1954 entry_catchstack_top);
1955
1956 /* Longjmp state is kept clean in success path */
1958 DUK_ASSERT(thr->heap->lj.iserror == 0);
1961
1962 /* Note: either pointer may be NULL (at entry), so don't assert */
1963 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
1964
1965 retval = DUK_EXEC_SUCCESS;
1966#if defined(DUK_USE_CPP_EXCEPTIONS)
1967 } catch (duk_internal_exception &exc) {
1968 DUK_UNREF(exc);
1969#else
1970 } else {
1971 /* Error path. */
1972#endif
1973 duk__handle_safe_call_error(thr,
1974 idx_retbase,
1975 num_stack_rets,
1976 entry_valstack_bottom_index,
1977 entry_callstack_top,
1978 entry_catchstack_top,
1979 old_jmpbuf_ptr);
1980
1981 /* Longjmp state is cleaned up by error handling */
1983 DUK_ASSERT(thr->heap->lj.iserror == 0);
1986
1987 retval = DUK_EXEC_ERROR;
1988 }
1989#if defined(DUK_USE_CPP_EXCEPTIONS)
1990 catch (std::exception &exc) {
1991 const char *what = exc.what();
1992 if (!what) {
1993 what = "unknown";
1994 }
1995 DUK_D(DUK_DPRINT("unexpected c++ std::exception (perhaps thrown by user code)"));
1996 try {
1997 DUK_ERROR_FMT1(thr, DUK_ERR_API_ERROR, "caught invalid c++ std::exception '%s' (perhaps thrown by user code)", what);
1998 } catch (duk_internal_exception exc) {
1999 DUK_D(DUK_DPRINT("caught api error thrown from unexpected c++ std::exception"));
2000 DUK_UNREF(exc);
2001 duk__handle_safe_call_error(thr,
2002 idx_retbase,
2003 num_stack_rets,
2004 entry_valstack_bottom_index,
2005 entry_callstack_top,
2006 entry_catchstack_top,
2007 old_jmpbuf_ptr);
2008 retval = DUK_EXEC_ERROR;
2009 }
2010 } catch (...) {
2011 DUK_D(DUK_DPRINT("unexpected c++ exception (perhaps thrown by user code)"));
2012 try {
2013 DUK_ERROR_API(thr, "caught invalid c++ exception (perhaps thrown by user code)");
2014 } catch (duk_internal_exception exc) {
2015 DUK_D(DUK_DPRINT("caught api error thrown from unexpected c++ exception"));
2016 DUK_UNREF(exc);
2017 duk__handle_safe_call_error(thr,
2018 idx_retbase,
2019 num_stack_rets,
2020 entry_valstack_bottom_index,
2021 entry_callstack_top,
2022 entry_catchstack_top,
2023 old_jmpbuf_ptr);
2024 retval = DUK_EXEC_ERROR;
2025 }
2026 }
2027#endif
2028
2029 DUK_ASSERT(thr->heap->lj.jmpbuf_ptr == old_jmpbuf_ptr); /* success/error path both do this */
2030
2031 duk__handle_safe_call_shared(thr,
2032 idx_retbase,
2033 num_stack_rets,
2034 entry_call_recursion_depth,
2035 entry_curr_thread,
2036 entry_thread_state,
2037 entry_ptr_curr_pc);
2038
2039 return retval;
2040}
2041
2042 DUK_LOCAL void duk__handle_safe_call_inner(duk_hthread *thr,
2043 duk_safe_call_function func,
2044 duk_idx_t idx_retbase,
2045 duk_idx_t num_stack_rets,
2046 duk_size_t entry_valstack_bottom_index,
2047 duk_size_t entry_callstack_top,
2048 duk_size_t entry_catchstack_top) {
2049 duk_context *ctx;
2050 duk_ret_t rc;
2051
2052 DUK_ASSERT(thr != NULL);
2053 ctx = (duk_context *) thr;
2055 DUK_UNREF(entry_valstack_bottom_index);
2056 DUK_UNREF(entry_callstack_top);
2057 DUK_UNREF(entry_catchstack_top);
2058
2059 /*
2060 * Thread state check and book-keeping.
2061 */
2062
2063 if (thr == thr->heap->curr_thread) {
2064 /* same thread */
2065 if (thr->state != DUK_HTHREAD_STATE_RUNNING) {
2066 /* should actually never happen, but check anyway */
2067 goto thread_state_error;
2068 }
2069 } else {
2070 /* different thread */
2071 DUK_ASSERT(thr->heap->curr_thread == NULL ||
2072 thr->heap->curr_thread->state == DUK_HTHREAD_STATE_RUNNING);
2073 if (thr->state != DUK_HTHREAD_STATE_INACTIVE) {
2074 goto thread_state_error;
2075 }
2076 DUK_HEAP_SWITCH_THREAD(thr->heap, thr);
2077 thr->state = DUK_HTHREAD_STATE_RUNNING;
2078
2079 /* Note: multiple threads may be simultaneously in the RUNNING
2080 * state, but not in the same "resume chain".
2081 */
2082 }
2083
2084 DUK_ASSERT(thr->heap->curr_thread == thr);
2086
2087 /*
2088 * Recursion limit check.
2089 *
2090 * Note: there is no need for an "ignore recursion limit" flag
2091 * for duk_handle_safe_call now.
2092 */
2093
2096 if (thr->heap->call_recursion_depth >= thr->heap->call_recursion_limit) {
2097 /* XXX: error message is a bit misleading: we reached a recursion
2098 * limit which is also essentially the same as a C callstack limit
2099 * (except perhaps with some relaxed threading assumptions).
2100 */
2101 DUK_ERROR_RANGE(thr, DUK_STR_C_CALLSTACK_LIMIT);
2102 }
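/* Illustration (hedged sketch, not from duk_js_call.c): the heap-wide
 * call_recursion_depth/limit pair checked above is what turns unbounded
 * native re-entry into a catchable RangeError instead of a C stack
 * overflow. The function name below is made up; it assumes "duktape.h".
 */
#if 0
static duk_ret_t nat_recurse(duk_context *ctx) {
	duk_push_c_function(ctx, nat_recurse, 0 /*nargs*/);
	if (duk_pcall(ctx, 0) != DUK_EXEC_SUCCESS) {
		/* At some nesting depth the recursion limit check fires and the
		 * inner call returns an error here; pass it upwards as a value.
		 */
	}
	return 1;  /* result or error object left on top */
}
#endif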
2103 thr->heap->call_recursion_depth++;
2104
2105 /*
2106 * Valstack spare check
2107 */
2108
2109 duk_require_stack(ctx, 0); /* internal spare */
2110
2111 /*
2112 * Make the C call
2113 */
2114
2115 rc = func(ctx);
2116
2117 DUK_DDD(DUK_DDDPRINT("safe_call, func rc=%ld", (long) rc));
2118
2119 /*
2120 * Valstack manipulation for results.
2121 */
2122
2123 /* we're running inside the caller's activation, so no change in call/catch stack or valstack bottom */
2124 DUK_ASSERT(thr->callstack_top == entry_callstack_top);
2125 DUK_ASSERT(thr->catchstack_top == entry_catchstack_top);
2126 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
2127 DUK_ASSERT((duk_size_t) (thr->valstack_bottom - thr->valstack) == entry_valstack_bottom_index);
2129 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
2130
2131 if (rc < 0) {
2132 duk_error_throw_from_negative_rc(thr, rc);
2133 }
2134 DUK_ASSERT(rc >= 0);
2135
2136 if (duk_get_top(ctx) < rc) {
2137 DUK_ERROR_API(thr, "not enough stack values for safe_call rc");
2138 }
2139
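/* Illustration (hedged sketch, not from duk_js_call.c): the rc check right
 * above is a sanity check against callbacks that claim more results than
 * the value stack holds. Assuming the safe call is made on an otherwise
 * empty value stack with nargs == 0, the made-up callback below trips it
 * and the whole duk_safe_call() returns DUK_EXEC_ERROR.
 */
#if 0
static duk_ret_t bad_rc(duk_context *ctx) {
	duk_push_int(ctx, 123);
	return 2;  /* claims two results but pushed only one */
}
#endif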
2140 DUK_ASSERT(thr->catchstack_top == entry_catchstack_top); /* no need to unwind */
2141 DUK_ASSERT(thr->callstack_top == entry_callstack_top);
2142
2143 duk__safe_call_adjust_valstack(thr, idx_retbase, num_stack_rets, rc);
2144 return;
2145
2146 thread_state_error:
2147 DUK_ERROR_FMT1(thr, DUK_ERR_TYPE_ERROR, "invalid thread state for safe_call (%ld)", (long) thr->state);
2149}
2150
2151 DUK_LOCAL void duk__handle_safe_call_error(duk_hthread *thr,
2152 duk_idx_t idx_retbase,
2153 duk_idx_t num_stack_rets,
2154 duk_size_t entry_valstack_bottom_index,
2155 duk_size_t entry_callstack_top,
2156 duk_size_t entry_catchstack_top,
2157 duk_jmpbuf *old_jmpbuf_ptr) {
2158 duk_context *ctx;
2159
2160 DUK_ASSERT(thr != NULL);
2161 ctx = (duk_context *) thr;
2163
2164 /*
2165 * Error during call. The error value is at heap->lj.value1.
2166 *
2167 * The very first thing we do is restore the previous setjmp catcher.
2168 * This means that any error in error handling will propagate outwards
2169 * instead of causing a setjmp() re-entry above.
2170 */
2171
2172 DUK_DDD(DUK_DDDPRINT("error caught during protected duk_handle_safe_call()"));
2173
2174 /* Other longjmp types are handled by executor before propagating
2175 * the error here.
2176 */
2178 DUK_ASSERT(thr->callstack_top >= entry_callstack_top);
2179 DUK_ASSERT(thr->catchstack_top >= entry_catchstack_top);
2180
2181 /* Note: either pointer may be NULL (at entry), so don't assert. */
2182 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
2183
2184 DUK_ASSERT(thr->catchstack_top >= entry_catchstack_top);
2185 DUK_ASSERT(thr->callstack_top >= entry_callstack_top);
2186 duk_hthread_catchstack_unwind(thr, entry_catchstack_top);
2187 duk_hthread_catchstack_shrink_check(thr);
2188 duk_hthread_callstack_unwind(thr, entry_callstack_top);
2189 duk_hthread_callstack_shrink_check(thr);
2190 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
2191
2192 /* [ ... | (crud) ] */
2193
2194 /* XXX: space in valstack? see discussion in duk_handle_call_xxx(). */
2195 duk_push_tval(ctx, &thr->heap->lj.value1);
2196
2197 /* [ ... | (crud) errobj ] */
2198
2199 DUK_ASSERT(duk_get_top(ctx) >= 1); /* at least errobj must be on stack */
2200
2201 /* check that the valstack has space for the final amount and any
2202 * intermediate space needed; this is unoptimal but should be safe
2203 */
2204 duk_require_stack_top(ctx, idx_retbase + num_stack_rets); /* final configuration */
2205 duk_require_stack(ctx, num_stack_rets);
2206
2207 duk__safe_call_adjust_valstack(thr, idx_retbase, num_stack_rets, 1); /* 1 = num actual 'return values' */
2208
2209 /* [ ... | ] or [ ... | errobj (M * undefined)] where M = num_stack_rets - 1 */
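/* Illustration (hedged caller-side sketch, not from duk_js_call.c): with
 * num_stack_rets == 2 an error therefore shows up as [ errobj undefined ]
 * in the two result slots; 'my_func' is a made-up duk_safe_call_function
 * and the fragment assumes #include <stdio.h> and "duktape.h".
 */
#if 0
if (duk_safe_call(ctx, my_func, 0 /*nargs*/, 2 /*nrets*/) == DUK_EXEC_ERROR) {
	/* First result slot holds the error, second is undefined. */
	fprintf(stderr, "failed: %s\n", duk_safe_to_string(ctx, -2));
}
duk_pop_2(ctx);  /* drop both result slots */
#endif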
2210
2211 /* These are just convenience "wiping" of state. Side effects should
2212 * not be an issue here: thr->heap and thr->heap->lj have a stable
2213 * pointer. Finalizer runs etc capture even out-of-memory errors so
2214 * nothing should throw here.
2215 */
2217 thr->heap->lj.iserror = 0;
2218 DUK_TVAL_SET_UNDEFINED_UPDREF(thr, &thr->heap->lj.value1); /* side effects */
2219 DUK_TVAL_SET_UNDEFINED_UPDREF(thr, &thr->heap->lj.value2); /* side effects */
2220}
2221
2222 DUK_LOCAL void duk__handle_safe_call_shared(duk_hthread *thr,
2223 duk_idx_t idx_retbase,
2224 duk_idx_t num_stack_rets,
2225 duk_int_t entry_call_recursion_depth,
2226 duk_hthread *entry_curr_thread,
2227 duk_uint_fast8_t entry_thread_state,
2228 duk_instr_t **entry_ptr_curr_pc) {
2229 duk_context *ctx;
2230
2231 DUK_ASSERT(thr != NULL);
2232 ctx = (duk_context *) thr;
2234 DUK_UNREF(ctx);
2235 DUK_UNREF(idx_retbase);
2236 DUK_UNREF(num_stack_rets);
2237
2238 /* Restore entry thread executor curr_pc stack frame pointer. */
2239 thr->ptr_curr_pc = entry_ptr_curr_pc;
2240
2241 /* XXX: because we unwind stacks above, thr->heap->curr_thread is at
2242 * risk of pointing to an already freed thread. This was indeed the
2243 * case in test-bug-multithread-valgrind.c, until duk_handle_call()
2244 * was fixed to restore thr->heap->curr_thread before rethrowing an
2245 * uncaught error.
2246 */
2247 DUK_HEAP_SWITCH_THREAD(thr->heap, entry_curr_thread); /* may be NULL */
2248 thr->state = (duk_uint8_t) entry_thread_state;
2249
2250 DUK_ASSERT((thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread == NULL) || /* first call */
2251 (thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread != NULL) || /* other call */
2252 (thr->state == DUK_HTHREAD_STATE_RUNNING && thr->heap->curr_thread == thr)); /* current thread */
2253
2254 thr->heap->call_recursion_depth = entry_call_recursion_depth;
2255
2256 /* stack discipline consistency check */
2257 DUK_ASSERT(duk_get_top(ctx) == idx_retbase + num_stack_rets);
2258
2259 /* A debugger forced interrupt check is not needed here, as
2260 * problematic safe calls are not caused by side effects.
2261 */
2262
2263#if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
2264 duk__interrupt_fixup(thr, entry_curr_thread);
2265#endif
2266}
2267
2268/*
2269 * Helper for handling an Ecmascript-to-Ecmascript call or an initial
2270 * Duktape.Thread.resume() into an Ecmascript function.
2271 *
2272 * Compared to normal calls handled by duk_handle_call(), there are a
2273 * bunch of differences:
2274 *
2275 * - the call is never protected
2276 * - there is no C recursion depth increase (hence an "ignore recursion
2277 * limit" flag is not applicable)
2278 * - instead of making the call, this helper just performs the thread
2279 * setup and returns; the bytecode executor then restarts execution
2280 * internally
2281 * - ecmascript functions are never 'vararg' functions (they access
2282 * varargs through the 'arguments' object)
2283 *
2284 * The callstack of the target contains an earlier Ecmascript call in case
2285 * of an Ecmascript-to-Ecmascript call (whose idx_retval is updated), or
2286 * is empty in case of an initial Duktape.Thread.resume().
2287 *
2288 * The first thing to do here is to figure out whether an ecma-to-ecma
2289 * call is actually possible. It's not always the case if the target is
2290 * a bound function; the final function may be native. In that case,
2291 * return an error so caller can fall back to a normal call path.
2292 */
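/* Illustration (hedged, simplified sketch, not the actual executor code):
 * the caller-side contract of this helper. When setup succeeds the bytecode
 * executor simply restarts its dispatch loop on the new topmost activation;
 * when it fails (final target is not a compiled function) the caller falls
 * back to the ordinary call path.
 */
#if 0
if (duk_handle_ecma_call_setup(thr, num_stack_args, call_flags)) {
	goto restart_execution;  /* new activation is ready to run */
} else {
	duk_handle_call_unprotected(thr, num_stack_args, call_flags);
}
#endif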
2293
2294 DUK_INTERNAL duk_bool_t duk_handle_ecma_call_setup(duk_hthread *thr,
2295 duk_idx_t num_stack_args,
2296 duk_small_uint_t call_flags) {
2297 duk_context *ctx = (duk_context *) thr;
2298 duk_size_t entry_valstack_bottom_index;
2299 duk_idx_t idx_func; /* valstack index of 'func' and retval (relative to entry valstack_bottom) */
2300 duk_idx_t idx_args; /* valstack index of start of args (arg1) (relative to entry valstack_bottom) */
2301 duk_idx_t nargs; /* # argument registers target function wants (< 0 => never for ecma calls) */
2302 duk_idx_t nregs; /* # total registers target function wants on entry (< 0 => never for ecma calls) */
2303 duk_hobject *func; /* 'func' on stack (borrowed reference) */
2304 duk_tval *tv_func; /* duk_tval ptr for 'func' on stack (borrowed reference) */
2305 duk_activation *act;
2306 duk_hobject *env;
2307 duk_bool_t use_tailcall;
2308 duk_instr_t **entry_ptr_curr_pc;
2309
2310 DUK_ASSERT(thr != NULL);
2311 DUK_ASSERT(ctx != NULL);
2312 DUK_ASSERT(!((call_flags & DUK_CALL_FLAG_IS_RESUME) != 0 && (call_flags & DUK_CALL_FLAG_IS_TAILCALL) != 0));
2313
2314 /* XXX: assume these? */
2315 DUK_ASSERT(thr->valstack != NULL);
2316 DUK_ASSERT(thr->callstack != NULL);
2317 DUK_ASSERT(thr->catchstack != NULL);
2318
2319 /* no need to handle thread state book-keeping here */
2320 DUK_ASSERT((call_flags & DUK_CALL_FLAG_IS_RESUME) != 0 ||
2321 (thr->state == DUK_HTHREAD_STATE_RUNNING &&
2322 thr->heap->curr_thread == thr));
2323
2324 /* If thr->ptr_curr_pc is set, sync curr_pc to act->pc. Then NULL
2325 * thr->ptr_curr_pc so that it's not accidentally used with an incorrect
2326 * activation when side effects occur. If we end up not making the
2327 * call we must restore the value.
2328 */
2329 entry_ptr_curr_pc = thr->ptr_curr_pc;
2330 duk_hthread_sync_and_null_currpc(thr);
2331
2332 /* if a tail call:
2333 * - an Ecmascript activation must be on top of the callstack
2334 * - there cannot be any active catchstack entries
2335 */
2336#if defined(DUK_USE_ASSERTIONS)
2337 if (call_flags & DUK_CALL_FLAG_IS_TAILCALL) {
2338 duk_size_t our_callstack_index;
2339 duk_size_t i;
2340
2341 DUK_ASSERT(thr->callstack_top >= 1);
2342 our_callstack_index = thr->callstack_top - 1;
2343 DUK_ASSERT_DISABLE(our_callstack_index >= 0);
2344 DUK_ASSERT(our_callstack_index < thr->callstack_size);
2345 DUK_ASSERT(DUK_ACT_GET_FUNC(thr->callstack + our_callstack_index) != NULL);
2347
2348 /* No entry in the catchstack which would actually catch a
2349 * throw can refer to the callstack entry being reused.
2350 * There *can* be catchstack entries referring to the current
2351 * callstack entry as long as they don't catch (e.g. label sites).
2352 */
2353
2354 for (i = 0; i < thr->catchstack_top; i++) {
2355 DUK_ASSERT(thr->catchstack[i].callstack_index < our_callstack_index || /* refer to callstack entries below current */
2356 DUK_CAT_GET_TYPE(thr->catchstack + i) == DUK_CAT_TYPE_LABEL); /* or a non-catching entry */
2357 }
2358 }
2359#endif /* DUK_USE_ASSERTIONS */
2360
2361 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
2362 /* XXX: rework */
2363 idx_func = duk_normalize_index(thr, -num_stack_args - 2);
2364 idx_args = idx_func + 2;
2365
2366 DUK_DD(DUK_DDPRINT("handle_ecma_call_setup: thr=%p, "
2367 "num_stack_args=%ld, call_flags=0x%08lx (resume=%ld, tailcall=%ld), "
2368 "idx_func=%ld, idx_args=%ld, entry_valstack_bottom_index=%ld",
2369 (void *) thr,
2370 (long) num_stack_args,
2371 (unsigned long) call_flags,
2372 (long) ((call_flags & DUK_CALL_FLAG_IS_RESUME) != 0 ? 1 : 0),
2373 (long) ((call_flags & DUK_CALL_FLAG_IS_TAILCALL) != 0 ? 1 : 0),
2374 (long) idx_func,
2375 (long) idx_args,
2376 (long) entry_valstack_bottom_index));
2377
2378 if (DUK_UNLIKELY(idx_func < 0 || idx_args < 0)) {
2379 /* XXX: assert? compiler is responsible for this never happening */
2380 DUK_ERROR_TYPE(thr, DUK_STR_INVALID_CALL_ARGS);
2381 }
2382
2383 /*
2384 * Check the function type, handle bound function chains, and prepare
2385 * parameters for the rest of the call handling. Also figure out the
2386 * effective 'this' binding, which replaces the current value at
2387 * idx_func + 1.
2388 *
2389 * If the target function is a 'bound' one, follow the chain of 'bound'
2390 * functions until a non-bound function is found. During this process,
2391 * bound arguments are 'prepended' to existing ones, and the "this"
2392 * binding is overridden. See E5 Section 15.3.4.5.1.
2393 *
2394 * If the final target function cannot be handled by an ecma-to-ecma
2395 * call, return to the caller with a return value indicating this case.
2396 * The bound chain is resolved and the caller can resume with a plain
2397 * function call.
2398 */
2399
2400 func = duk__nonbound_func_lookup(ctx, idx_func, &num_stack_args, &tv_func, call_flags);
2401 if (func == NULL || !DUK_HOBJECT_IS_COMPILEDFUNCTION(func)) {
2402 DUK_DDD(DUK_DDDPRINT("final target is a lightfunc/nativefunc, cannot do ecma-to-ecma call"));
2403 thr->ptr_curr_pc = entry_ptr_curr_pc;
2404 return 0;
2405 }
2406 /* XXX: tv_func is not actually needed */
2407
2408 DUK_ASSERT(func != NULL);
2411
2412 duk__coerce_effective_this_binding(thr, func, idx_func + 1);
2413 DUK_DDD(DUK_DDDPRINT("effective 'this' binding is: %!T",
2414 duk_get_tval(ctx, idx_func + 1)));
2415
2416 nargs = ((duk_hcompiledfunction *) func)->nargs;
2417 nregs = ((duk_hcompiledfunction *) func)->nregs;
2418 DUK_ASSERT(nregs >= nargs);
2419
2420 /* [ ... func this arg1 ... argN ] */
2421
2422 /*
2423 * Preliminary activation record and valstack manipulation.
2424 * The concrete actions depend on whether we're dealing
2425 * with a tail call (reuse an existing activation), a resume,
2426 * or a normal call.
2427 *
2428 * The basic actions, in varying order, are:
2429 *
2430 * - Check stack size for call handling
2431 * - Grow call stack if necessary (non-tail-calls)
2432 * - Update current activation (idx_retval) if necessary
2433 * (non-tail, non-resume calls)
2434 * - Move start of args (idx_args) to valstack bottom
2435 * (tail calls)
2436 *
2437 * Don't touch valstack_bottom or valstack_top yet so that Duktape API
2438 * calls work normally.
2439 */
2440
2441 /* XXX: some overlapping code; cleanup */
2442 use_tailcall = call_flags & DUK_CALL_FLAG_IS_TAILCALL;
2443#if !defined(DUK_USE_TAILCALL)
2444 DUK_ASSERT(use_tailcall == 0); /* compiler ensures this */
2445#endif
2446 if (use_tailcall) {
2447 /* tailcall cannot be flagged to resume calls, and a
2448 * previous frame must exist
2449 */
2450 DUK_ASSERT(thr->callstack_top >= 1);
2451 DUK_ASSERT((call_flags & DUK_CALL_FLAG_IS_RESUME) == 0);
2452
2453 act = thr->callstack + thr->callstack_top - 1;
2454 if (act->flags & DUK_ACT_FLAG_PREVENT_YIELD) {
2455 /* See: test-bug-tailcall-preventyield-assert.c. */
2456 DUK_DDD(DUK_DDDPRINT("tail call prevented by current activation having DUK_ACT_FLAG_PREVENTYIELD"));
2457 use_tailcall = 0;
2458 } else if (DUK_HOBJECT_HAS_NOTAIL(func)) {
2459 DUK_D(DUK_DPRINT("tail call prevented by function having a notail flag"));
2460 use_tailcall = 0;
2461 }
2462 }
2463
2464 if (use_tailcall) {
2465 duk_tval *tv1, *tv2;
2466 duk_size_t cs_index;
2467 duk_int_t i_stk; /* must be signed for loop structure */
2468 duk_idx_t i_arg;
2469
2470 /*
2471 * Tailcall handling
2472 *
2473 * Although the callstack entry is reused, we need to explicitly unwind
2474 * the current activation (or simulate an unwind). In particular, the
2475 * current activation must be closed, otherwise something like
2476 * test-bug-reduce-judofyr.js results. Also the catchstack needs to be unwound
2477 * because there may be non-error-catching label entries in valid tail calls.
2478 */
2479
2480 DUK_DDD(DUK_DDDPRINT("is tail call, reusing activation at callstack top, at index %ld",
2481 (long) (thr->callstack_top - 1)));
2482
2483 /* 'act' already set above */
2484
2489
2490 /* Unwind catchstack entries referring to the callstack entry we're reusing */
2491 cs_index = thr->callstack_top - 1;
2492 DUK_ASSERT(thr->catchstack_top <= DUK_INT_MAX); /* catchstack limits */
2493 for (i_stk = (duk_int_t) (thr->catchstack_top - 1); i_stk >= 0; i_stk--) {
2494 duk_catcher *cat = thr->catchstack + i_stk;
2495 if (cat->callstack_index != cs_index) {
2496 /* 'i' is the first entry we'll keep */
2497 break;
2498 }
2499 }
2500 duk_hthread_catchstack_unwind(thr, i_stk + 1);
2501
2502 /* Unwind the topmost callstack entry before reusing it */
2503 DUK_ASSERT(thr->callstack_top > 0);
2504 duk_hthread_callstack_unwind(thr, thr->callstack_top - 1);
2505
2506 /* Then reuse the unwound activation; callstack was not shrunk so there is always space */
2507 thr->callstack_top++;
2509 act = thr->callstack + thr->callstack_top - 1;
2510
2511 /* Start filling in the activation */
2512 act->func = func; /* don't want an intermediate exposed state with func == NULL */
2513#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
2514 act->prev_caller = NULL;
2515#endif
2516 DUK_ASSERT(func != NULL);
2518 /* don't want an intermediate exposed state with invalid pc */
2519 act->curr_pc = DUK_HCOMPILEDFUNCTION_GET_CODE_BASE(thr->heap, (duk_hcompiledfunction *) func);
2520#if defined(DUK_USE_DEBUGGER_SUPPORT)
2521 act->prev_line = 0;
2522#endif
2523 DUK_TVAL_SET_OBJECT(&act->tv_func, func); /* borrowed, no refcount */
2524#if defined(DUK_USE_REFERENCE_COUNTING)
2525 DUK_HOBJECT_INCREF(thr, func);
2526 act = thr->callstack + thr->callstack_top - 1; /* side effects (currently none though) */
2527#endif
2528
2529#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
2530#if defined(DUK_USE_TAILCALL)
2531#error incorrect options: tail calls enabled with function caller property
2532#endif
2533 /* XXX: this doesn't actually work properly for tail calls, so
2534 * tail calls are disabled when DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
2535 * is in use.
2536 */
2537 duk__update_func_caller_prop(thr, func);
2538 act = thr->callstack + thr->callstack_top - 1;
2539#endif
2540
2541 act->flags = (DUK_HOBJECT_HAS_STRICT(func) ?
2542 DUK_ACT_FLAG_STRICT | DUK_ACT_FLAG_TAILCALLED :
2543 DUK_ACT_FLAG_TAILCALLED);
2544
2545 DUK_ASSERT(DUK_ACT_GET_FUNC(act) == func); /* already updated */
2546 DUK_ASSERT(act->var_env == NULL); /* already NULLed (by unwind) */
2547 DUK_ASSERT(act->lex_env == NULL); /* already NULLed (by unwind) */
2548 act->idx_bottom = entry_valstack_bottom_index; /* tail call -> reuse current "frame" */
2549 DUK_ASSERT(nregs >= 0);
2550#if 0 /* topmost activation idx_retval is considered garbage, no need to init */
2551 act->idx_retval = 0;
2552#endif
2553
2554 /*
2555 * Manipulate valstack so that args are on the current bottom and the
2556 * previous caller's 'this' binding (which is the value preceding the
2557 * current bottom) is replaced with the new 'this' binding:
2558 *
2559 * [ ... this_old | (crud) func this_new arg1 ... argN ]
2560 * --> [ ... this_new | arg1 ... argN ]
2561 *
2562 * For tail calling to work properly, the valstack bottom must not grow
2563 * here; otherwise crud would accumulate on the valstack.
2564 */
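/* Worked illustration (added example, not from the original source): with
 * idx_func == 2 and two call arguments, so idx_args == 4:
 *
 * before: [ a b this_old | x y func this_new arg1 arg2 ] ('|' = bottom)
 * step 1: this_new is copied over this_old (the slot just below bottom)
 * step 2: the idx_args == 4 bottom slots (x y func this_new) are removed
 * after: [ a b this_new | arg1 arg2 ]
 *
 * The bottom itself never moves, which is what keeps repeated tail calls
 * from accumulating crud on the value stack.
 */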
2565
2566 tv1 = thr->valstack_bottom - 1;
2567 tv2 = thr->valstack_bottom + idx_func + 1;
2568 DUK_ASSERT(tv1 >= thr->valstack && tv1 < thr->valstack_top); /* tv1 is -below- valstack_bottom */
2569 DUK_ASSERT(tv2 >= thr->valstack_bottom && tv2 < thr->valstack_top);
2570 DUK_TVAL_SET_TVAL_UPDREF(thr, tv1, tv2); /* side effects */
2571
2572 for (i_arg = 0; i_arg < idx_args; i_arg++) {
2573 /* XXX: block removal API primitive */
2574 /* Note: 'func' is popped from valstack here, but it is
2575 * already reachable from the activation.
2576 */
2577 duk_remove(ctx, 0);
2578 }
2579 idx_func = 0; DUK_UNREF(idx_func); /* really 'not applicable' anymore, should not be referenced after this */
2580 idx_args = 0;
2581
2582 /* [ ... this_new | arg1 ... argN ] */
2583 } else {
2584 DUK_DDD(DUK_DDDPRINT("not a tail call, pushing a new activation to callstack, to index %ld",
2585 (long) (thr->callstack_top)));
2586
2587 duk_hthread_callstack_grow(thr);
2588
2589 if (call_flags & DUK_CALL_FLAG_IS_RESUME) {
2590 DUK_DDD(DUK_DDDPRINT("is resume -> no update to current activation (may not even exist)"));
2591 } else {
2592 DUK_DDD(DUK_DDDPRINT("update to current activation idx_retval"));
2594 DUK_ASSERT(thr->callstack_top >= 1);
2595 act = thr->callstack + thr->callstack_top - 1;
2598 act->idx_retval = entry_valstack_bottom_index + idx_func;
2599 }
2600
2602 act = thr->callstack + thr->callstack_top;
2603 thr->callstack_top++;
2605
2609
2610 act->flags = (DUK_HOBJECT_HAS_STRICT(func) ?
2611 DUK_ACT_FLAG_STRICT :
2612 0);
2613 act->func = func;
2614 act->var_env = NULL;
2615 act->lex_env = NULL;
2616#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
2617 act->prev_caller = NULL;
2618#endif
2619 DUK_ASSERT(func != NULL);
2622#if defined(DUK_USE_DEBUGGER_SUPPORT)
2623 act->prev_line = 0;
2624#endif
2625 act->idx_bottom = entry_valstack_bottom_index + idx_args;
2626 DUK_ASSERT(nregs >= 0);
2627#if 0 /* topmost activation idx_retval is considered garbage, no need to init */
2628 act->idx_retval = 0;
2629#endif
2630 DUK_TVAL_SET_OBJECT(&act->tv_func, func); /* borrowed, no refcount */
2631
2632 DUK_HOBJECT_INCREF(thr, func); /* act->func */
2633
2634#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
2635 duk__update_func_caller_prop(thr, func);
2636 act = thr->callstack + thr->callstack_top - 1;
2637#endif
2638 }
2639
2640 /* [ ... func this arg1 ... argN ] (not tail call)
2641 * [ this | arg1 ... argN ] (tail call)
2642 *
2643 * idx_args updated to match
2644 */
2645
2646 /*
2647 * Environment record creation and 'arguments' object creation.
2648 * Named function expression name binding is handled by the
2649 * compiler; the compiled function's parent env will contain
2650 * the (immutable) binding already.
2651 *
2652 * Delayed creation (on demand) is handled in duk_js_var.c.
2653 */
2654
2655 /* XXX: unify handling with native call. */
2656
2657 DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(func)); /* bound function chain has already been resolved */
2658
2659 if (!DUK_HOBJECT_HAS_NEWENV(func)) {
2660 /* use existing env (e.g. for non-strict eval); cannot have
2661 * an own 'arguments' object (but can refer to the existing one)
2662 */
2663
2664 duk__handle_oldenv_for_call(thr, func, act);
2665 /* No need to re-lookup 'act' at present: no side effects. */
2666
2667 DUK_ASSERT(act->lex_env != NULL);
2668 DUK_ASSERT(act->var_env != NULL);
2669 goto env_done;
2670 }
2671
2673
2674 if (!DUK_HOBJECT_HAS_CREATEARGS(func)) {
2675 /* no need to create environment record now; leave as NULL */
2676 DUK_ASSERT(act->lex_env == NULL);
2677 DUK_ASSERT(act->var_env == NULL);
2678 goto env_done;
2679 }
2680
2681 /* third arg: absolute index (to entire valstack) of idx_bottom of new activation */
2682 env = duk_create_activation_environment_record(thr, func, act->idx_bottom);
2683 DUK_ASSERT(env != NULL);
2684
2685 /* [ ... arg1 ... argN envobj ] */
2686
2687 /* original input stack before nargs/nregs handling must be
2688 * intact for 'arguments' object
2689 */
2691 duk__handle_createargs_for_call(thr, func, env, num_stack_args);
2692
2693 /* [ ... arg1 ... argN envobj ] */
2694
2695 act = thr->callstack + thr->callstack_top - 1;
2696 act->lex_env = env;
2697 act->var_env = env;
2698 DUK_HOBJECT_INCREF(thr, act->lex_env);
2699 DUK_HOBJECT_INCREF(thr, act->var_env);
2700 duk_pop(ctx);
2701
2702 env_done:
2703 /* [ ... arg1 ... argN ] */
2704
2705 /*
2706 * Setup value stack: clamp to 'nargs', fill up to 'nregs'
2707 */
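/* Worked illustration (added example, not from the original source): for a
 * target compiled with nargs == 2 and nregs == 5, a call site passing three
 * arguments has the third argument dropped here (it remains reachable only
 * through the 'arguments' object created above, when one is created) and
 * registers r2..r4 are filled with undefined, so exactly nregs slots are
 * live above the new bottom.
 */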
2708
2709 duk__adjust_valstack_and_top(thr,
2710 num_stack_args,
2711 idx_args,
2712 nregs,
2713 nargs,
2714 func);
2715
2716 /*
2717 * Shift to new valstack_bottom.
2718 */
2719
2720 thr->valstack_bottom = thr->valstack_bottom + idx_args;
2721 /* keep current valstack_top */
2722 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
2724 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
2725
2726 /*
2727 * Return to bytecode executor, which will resume execution from
2728 * the topmost activation.
2729 */
2730
2731 return 1;
2732}