1 /* Frame object implementation */
2
3 #include "Python.h"
4 #include "pycore_ceval.h" // _PyEval_BuiltinsFromGlobals()
5 #include "pycore_code.h" // CO_FAST_LOCAL, etc.
6 #include "pycore_function.h" // _PyFunction_FromConstructor()
7 #include "pycore_moduleobject.h" // _PyModule_GetDict()
8 #include "pycore_object.h" // _PyObject_GC_UNTRACK()
9 #include "pycore_opcode.h" // _PyOpcode_Caches
10
11 #include "frameobject.h" // PyFrameObject
12 #include "pycore_frame.h"
13 #include "opcode.h" // EXTENDED_ARG
14 #include "structmember.h" // PyMemberDef
15
16 #define OFF(x) offsetof(PyFrameObject, x)
17
18 static PyMemberDef frame_memberlist[] = {
19 {"f_trace_lines", T_BOOL, OFF(f_trace_lines), 0},
20 {"f_trace_opcodes", T_BOOL, OFF(f_trace_opcodes), 0},
21 {NULL} /* Sentinel */
22 };
23
24
25 static PyObject *
26 frame_getlocals(PyFrameObject *f, void *closure)
27 {
28 if (PyFrame_FastToLocalsWithError(f) < 0)
29 return NULL;
30 PyObject *locals = f->f_frame->f_locals;
31 Py_INCREF(locals);
32 return locals;
33 }
34
35 int
36 PyFrame_GetLineNumber(PyFrameObject *f)
37 {
38 assert(f != NULL);
39 if (f->f_lineno != 0) {
40 return f->f_lineno;
41 }
42 else {
43 return _PyInterpreterFrame_GetLine(f->f_frame);
44 }
45 }
46
47 static PyObject *
48 frame_getlineno(PyFrameObject *f, void *closure)
49 {
50 int lineno = PyFrame_GetLineNumber(f);
51 if (lineno < 0) {
52 Py_RETURN_NONE;
53 }
54 else {
55 return PyLong_FromLong(lineno);
56 }
57 }
58
59 static PyObject *
60 frame_getlasti(PyFrameObject *f, void *closure)
61 {
62 int lasti = _PyInterpreterFrame_LASTI(f->f_frame);
63 if (lasti < 0) {
64 return PyLong_FromLong(-1);
65 }
66 return PyLong_FromLong(lasti * sizeof(_Py_CODEUNIT));
67 }
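/* Note for readers: f_lasti is exposed as a byte offset into the bytecode, so
 * with 2-byte code units an instruction index of, say, 3 (a made-up value) is
 * reported to Python code as f_lasti == 6. */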
68
69 static PyObject *
70 frame_getglobals(PyFrameObject *f, void *closure)
71 {
72 PyObject *globals = f->f_frame->f_globals;
73 if (globals == NULL) {
74 globals = Py_None;
75 }
76 Py_INCREF(globals);
77 return globals;
78 }
79
80 static PyObject *
81 frame_getbuiltins(PyFrameObject *f, void *closure)
82 {
83 PyObject *builtins = f->f_frame->f_builtins;
84 if (builtins == NULL) {
85 builtins = Py_None;
86 }
87 Py_INCREF(builtins);
88 return builtins;
89 }
90
91 static PyObject *
92 frame_getcode(PyFrameObject *f, void *closure)
93 {
94 if (PySys_Audit("object.__getattr__", "Os", f, "f_code") < 0) {
95 return NULL;
96 }
97 return (PyObject *)PyFrame_GetCode(f);
98 }
99
100 static PyObject *
101 frame_getback(PyFrameObject *f, void *closure)
102 {
103 PyObject *res = (PyObject *)PyFrame_GetBack(f);
104 if (res == NULL) {
105 Py_RETURN_NONE;
106 }
107 return res;
108 }
109
110 // Given the index of the effective opcode, scan back to construct the oparg
111 // with EXTENDED_ARG. This only works correctly with *unquickened* code,
112 // obtained via a call to _PyCode_GetCode!
113 static unsigned int
114 get_arg(const _Py_CODEUNIT *codestr, Py_ssize_t i)
115 {
116 _Py_CODEUNIT word;
117 unsigned int oparg = _Py_OPARG(codestr[i]);
118 if (i >= 1 && _Py_OPCODE(word = codestr[i-1]) == EXTENDED_ARG) {
119 oparg |= _Py_OPARG(word) << 8;
120 if (i >= 2 && _Py_OPCODE(word = codestr[i-2]) == EXTENDED_ARG) {
121 oparg |= _Py_OPARG(word) << 16;
122 if (i >= 3 && _Py_OPCODE(word = codestr[i-3]) == EXTENDED_ARG) {
123 oparg |= _Py_OPARG(word) << 24;
124 }
125 }
126 }
127 return oparg;
128 }
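/* Illustrative example (made-up offsets and opargs): if codestr[3] is
 * EXTENDED_ARG with oparg 0x01 and codestr[4] is LOAD_CONST with oparg 0x02,
 * then get_arg(codestr, 4) reconstructs the full oparg 0x0102 (258). */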
129
/* Model the evaluation stack, to determine which jumps
 * are safe and how many values need to be popped.
 * The stack is modelled by a single 64-bit integer, treating any
 * stack that can't fit into 64 bits as "overflowed".
 */
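/* Illustrative example of the encoding (values from the Kind enum below):
 * pushing an Iterator and then an Object onto an empty stack yields
 *     ((EMPTY_STACK << BITS_PER_BLOCK) | Iterator) << BITS_PER_BLOCK | Object
 *   == (1 << 3) | 3 == 0xb,
 * and pop_value() simply shifts the top three bits back off. */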
135
136 typedef enum kind {
137 Iterator = 1,
138 Except = 2,
139 Object = 3,
140 Null = 4,
141 Lasti = 5,
142 } Kind;
143
144 static int
145 compatible_kind(Kind from, Kind to) {
146 if (to == 0) {
147 return 0;
148 }
149 if (to == Object) {
150 return from != Null;
151 }
152 if (to == Null) {
153 return 1;
154 }
155 return from == to;
156 }
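/* Informal reading of compatible_kind(): a source Null only matches a target
 * Null; any non-Null source value satisfies a target Object; a target Null
 * slot accepts any source kind; everything else must match exactly. */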
157
158 #define BITS_PER_BLOCK 3
159
160 #define UNINITIALIZED -2
161 #define OVERFLOWED -1
162
163 #define MAX_STACK_ENTRIES (63/BITS_PER_BLOCK)
164 #define WILL_OVERFLOW (1ULL<<((MAX_STACK_ENTRIES-1)*BITS_PER_BLOCK))
165
166 #define EMPTY_STACK 0
167
168 static inline int64_t
169 push_value(int64_t stack, Kind kind)
170 {
171 if (((uint64_t)stack) >= WILL_OVERFLOW) {
172 return OVERFLOWED;
173 }
174 else {
175 return (stack << BITS_PER_BLOCK) | kind;
176 }
177 }
178
179 static inline int64_t
180 pop_value(int64_t stack)
181 {
182 return Py_ARITHMETIC_RIGHT_SHIFT(int64_t, stack, BITS_PER_BLOCK);
183 }
184
185 #define MASK ((1<<BITS_PER_BLOCK)-1)
186
187 static inline Kind
188 top_of_stack(int64_t stack)
189 {
190 return stack & MASK;
191 }
192
193 static inline Kind
194 peek(int64_t stack, int n)
195 {
196 assert(n >= 1);
197 return (stack>>(BITS_PER_BLOCK*(n-1))) & MASK;
198 }
199
static int64_t
201 stack_swap(int64_t stack, int n)
202 {
203 assert(n >= 1);
204 Kind to_swap = peek(stack, n);
205 Kind top = top_of_stack(stack);
206 int shift = BITS_PER_BLOCK*(n-1);
207 int64_t replaced_low = (stack & ~(MASK << shift)) | (top << shift);
208 int64_t replaced_top = (replaced_low & ~MASK) | to_swap;
209 return replaced_top;
210 }
211
212 static int64_t
213 pop_to_level(int64_t stack, int level) {
214 if (level == 0) {
215 return EMPTY_STACK;
216 }
217 int64_t max_item = (1<<BITS_PER_BLOCK) - 1;
218 int64_t level_max_stack = max_item << ((level-1) * BITS_PER_BLOCK);
219 while (stack > level_max_stack) {
220 stack = pop_value(stack);
221 }
222 return stack;
223 }
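#if 0
/* A minimal sanity-check sketch for the stack model above.  It is not part of
 * the build, and the function name is ours, not CPython API. */
static void
stack_model_selftest(void)
{
    int64_t s = EMPTY_STACK;
    s = push_value(s, Iterator);
    s = push_value(s, Null);
    s = push_value(s, Object);
    assert(top_of_stack(s) == Object);
    assert(peek(s, 3) == Iterator);
    /* Swapping the top with the third entry mirrors the SWAP opcode. */
    s = stack_swap(s, 3);
    assert(top_of_stack(s) == Iterator);
    assert(peek(s, 3) == Object);
    /* Dropping back to a depth of one leaves only the bottom entry. */
    s = pop_to_level(s, 1);
    assert(top_of_stack(s) == Object);
    assert(pop_value(s) == EMPTY_STACK);
}
#endif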
224
225 #if 0
226 /* These functions are useful for debugging the stack marking code */
227
228 static char
229 tos_char(int64_t stack) {
230 switch(top_of_stack(stack)) {
231 case Iterator:
232 return 'I';
233 case Except:
234 return 'E';
235 case Object:
236 return 'O';
237 case Lasti:
238 return 'L';
239 case Null:
240 return 'N';
241 }
242 return '?';
243 }
244
245 static void
246 print_stack(int64_t stack) {
247 if (stack < 0) {
248 if (stack == UNINITIALIZED) {
249 printf("---");
250 }
251 else if (stack == OVERFLOWED) {
252 printf("OVERFLOWED");
253 }
254 else {
255 printf("??");
256 }
257 return;
258 }
259 while (stack) {
260 printf("%c", tos_char(stack));
261 stack = pop_value(stack);
262 }
263 }
264
265 static void
266 print_stacks(int64_t *stacks, int n) {
267 for (int i = 0; i < n; i++) {
268 printf("%d: ", i);
269 print_stack(stacks[i]);
270 printf("\n");
271 }
272 }
273
274 #endif
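/* The exception-table scan in mark_stacks() below relies on a varint decoder
 * that is not shown in this excerpt.  What follows is a minimal sketch,
 * assuming the documented encoding (six value bits per byte, bit 6 set on
 * every byte except the last); the canonical helper may differ in detail. */
static unsigned char *
parse_varint(unsigned char *p, int *result)
{
    int val = p[0] & 63;
    while (p[0] & 64) {
        p++;
        val = (val << 6) | (p[0] & 63);
    }
    *result = val;
    return p + 1;
}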
275
276 static int64_t *
277 mark_stacks(PyCodeObject *code_obj, int len)
278 {
279 PyObject *co_code = _PyCode_GetCode(code_obj);
280 if (co_code == NULL) {
281 return NULL;
282 }
283 _Py_CODEUNIT *code = (_Py_CODEUNIT *)PyBytes_AS_STRING(co_code);
284 int64_t *stacks = PyMem_New(int64_t, len+1);
285 int i, j, opcode;
286
287 if (stacks == NULL) {
288 PyErr_NoMemory();
289 Py_DECREF(co_code);
290 return NULL;
291 }
292 for (int i = 1; i <= len; i++) {
293 stacks[i] = UNINITIALIZED;
294 }
295 stacks[0] = EMPTY_STACK;
296 if (code_obj->co_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR))
297 {
298 // Generators get sent None while starting:
299 stacks[0] = push_value(stacks[0], Object);
300 }
301 int todo = 1;
302 while (todo) {
303 todo = 0;
304 /* Scan instructions */
305 for (i = 0; i < len; i++) {
306 int64_t next_stack = stacks[i];
307 if (next_stack == UNINITIALIZED) {
308 continue;
309 }
310 opcode = _Py_OPCODE(code[i]);
311 switch (opcode) {
312 case JUMP_IF_FALSE_OR_POP:
313 case JUMP_IF_TRUE_OR_POP:
314 case POP_JUMP_FORWARD_IF_FALSE:
315 case POP_JUMP_BACKWARD_IF_FALSE:
316 case POP_JUMP_FORWARD_IF_TRUE:
317 case POP_JUMP_BACKWARD_IF_TRUE:
318 case POP_JUMP_FORWARD_IF_NONE:
319 case POP_JUMP_BACKWARD_IF_NONE:
320 case POP_JUMP_FORWARD_IF_NOT_NONE:
321 case POP_JUMP_BACKWARD_IF_NOT_NONE:
322 {
323 int64_t target_stack;
324 int j = get_arg(code, i);
325 if (opcode == POP_JUMP_FORWARD_IF_FALSE ||
326 opcode == POP_JUMP_FORWARD_IF_TRUE ||
327 opcode == JUMP_IF_FALSE_OR_POP ||
328 opcode == JUMP_IF_TRUE_OR_POP ||
329 opcode == POP_JUMP_FORWARD_IF_NONE ||
330 opcode == POP_JUMP_FORWARD_IF_NOT_NONE)
331 {
332 j += i + 1;
333 }
334 else {
335 assert(opcode == POP_JUMP_BACKWARD_IF_FALSE ||
336 opcode == POP_JUMP_BACKWARD_IF_TRUE ||
337 opcode == POP_JUMP_BACKWARD_IF_NONE ||
338 opcode == POP_JUMP_BACKWARD_IF_NOT_NONE);
339 j = i + 1 - j;
340 }
341 assert(j < len);
342 if (stacks[j] == UNINITIALIZED && j < i) {
343 todo = 1;
344 }
345 if (opcode == JUMP_IF_FALSE_OR_POP ||
346 opcode == JUMP_IF_TRUE_OR_POP)
347 {
348 target_stack = next_stack;
349 next_stack = pop_value(next_stack);
350 }
351 else {
352 next_stack = pop_value(next_stack);
353 target_stack = next_stack;
354 }
355 assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack);
356 stacks[j] = target_stack;
357 stacks[i+1] = next_stack;
358 break;
359 }
360 case SEND:
361 j = get_arg(code, i) + i + 1;
362 assert(j < len);
363 assert(stacks[j] == UNINITIALIZED || stacks[j] == pop_value(next_stack));
364 stacks[j] = pop_value(next_stack);
365 stacks[i+1] = next_stack;
366 break;
367 case JUMP_FORWARD:
368 j = get_arg(code, i) + i + 1;
369 assert(j < len);
370 assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack);
371 stacks[j] = next_stack;
372 break;
373 case JUMP_BACKWARD:
374 case JUMP_BACKWARD_NO_INTERRUPT:
375 j = i + 1 - get_arg(code, i);
376 assert(j >= 0);
377 assert(j < len);
378 if (stacks[j] == UNINITIALIZED && j < i) {
379 todo = 1;
380 }
381 assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack);
382 stacks[j] = next_stack;
383 break;
384 case GET_ITER:
385 case GET_AITER:
386 next_stack = push_value(pop_value(next_stack), Iterator);
387 stacks[i+1] = next_stack;
388 break;
389 case FOR_ITER:
390 {
391 int64_t target_stack = pop_value(next_stack);
392 stacks[i+1] = push_value(next_stack, Object);
393 j = get_arg(code, i) + i + 1;
394 assert(j < len);
395 assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack);
396 stacks[j] = target_stack;
397 break;
398 }
399 case END_ASYNC_FOR:
400 next_stack = pop_value(pop_value(next_stack));
401 stacks[i+1] = next_stack;
402 break;
403 case PUSH_EXC_INFO:
404 next_stack = push_value(next_stack, Except);
405 stacks[i+1] = next_stack;
406 break;
407 case POP_EXCEPT:
408 assert(top_of_stack(next_stack) == Except);
409 next_stack = pop_value(next_stack);
410 stacks[i+1] = next_stack;
411 break;
412 case RETURN_VALUE:
413 assert(pop_value(next_stack) == EMPTY_STACK);
414 assert(top_of_stack(next_stack) == Object);
415 break;
416 case RAISE_VARARGS:
417 break;
418 case RERAISE:
419 assert(top_of_stack(next_stack) == Except);
420 /* End of block */
421 break;
422 case PUSH_NULL:
423 next_stack = push_value(next_stack, Null);
424 stacks[i+1] = next_stack;
425 break;
426 case LOAD_GLOBAL:
427 if (_Py_OPARG(code[i]) & 1) {
428 next_stack = push_value(next_stack, Null);
429 }
430 next_stack = push_value(next_stack, Object);
431 stacks[i+1] = next_stack;
432 break;
433 case LOAD_METHOD:
434 assert(top_of_stack(next_stack) == Object);
435 next_stack = pop_value(next_stack);
436 next_stack = push_value(next_stack, Null);
437 next_stack = push_value(next_stack, Object);
438 stacks[i+1] = next_stack;
439 break;
440 case CALL:
441 {
442 next_stack = pop_value(pop_value(next_stack));
443 next_stack = push_value(next_stack, Object);
444 stacks[i+1] = next_stack;
445 break;
446 }
447 case SWAP:
448 {
449 int n = get_arg(code, i);
450 next_stack = stack_swap(next_stack, n);
451 stacks[i+1] = next_stack;
452 break;
453 }
454 case COPY:
455 {
456 int n = get_arg(code, i);
457 next_stack = push_value(next_stack, peek(next_stack, n));
458 stacks[i+1] = next_stack;
459 break;
460 }
461 default:
462 {
463 int delta = PyCompile_OpcodeStackEffect(opcode, get_arg(code, i));
464 assert(delta != PY_INVALID_STACK_EFFECT);
465 while (delta < 0) {
466 next_stack = pop_value(next_stack);
467 delta++;
468 }
469 while (delta > 0) {
470 next_stack = push_value(next_stack, Object);
471 delta--;
472 }
473 stacks[i+1] = next_stack;
474 }
475 }
476 }
477 /* Scan exception table */
478 unsigned char *start = (unsigned char *)PyBytes_AS_STRING(code_obj->co_exceptiontable);
479 unsigned char *end = start + PyBytes_GET_SIZE(code_obj->co_exceptiontable);
480 unsigned char *scan = start;
481 while (scan < end) {
482 int start_offset, size, handler;
483 scan = parse_varint(scan, &start_offset);
484 assert(start_offset >= 0 && start_offset < len);
485 scan = parse_varint(scan, &size);
486 assert(size >= 0 && start_offset+size <= len);
487 scan = parse_varint(scan, &handler);
488 assert(handler >= 0 && handler < len);
489 int depth_and_lasti;
490 scan = parse_varint(scan, &depth_and_lasti);
491 int level = depth_and_lasti >> 1;
492 int lasti = depth_and_lasti & 1;
493 if (stacks[start_offset] != UNINITIALIZED) {
494 if (stacks[handler] == UNINITIALIZED) {
495 todo = 1;
496 uint64_t target_stack = pop_to_level(stacks[start_offset], level);
497 if (lasti) {
498 target_stack = push_value(target_stack, Lasti);
499 }
500 target_stack = push_value(target_stack, Except);
501 stacks[handler] = target_stack;
502 }
503 }
504 }
505 }
506 Py_DECREF(co_code);
507 return stacks;
508 }
509
510 static int
511 compatible_stack(int64_t from_stack, int64_t to_stack)
512 {
513 if (from_stack < 0 || to_stack < 0) {
514 return 0;
515 }
516 while(from_stack > to_stack) {
517 from_stack = pop_value(from_stack);
518 }
519 while(from_stack) {
520 Kind from_top = top_of_stack(from_stack);
521 Kind to_top = top_of_stack(to_stack);
522 if (!compatible_kind(from_top, to_top)) {
523 return 0;
524 }
525 from_stack = pop_value(from_stack);
526 to_stack = pop_value(to_stack);
527 }
528 return to_stack == 0;
529 }
530
531 static const char *
532 explain_incompatible_stack(int64_t to_stack)
533 {
534 assert(to_stack != 0);
535 if (to_stack == OVERFLOWED) {
536 return "stack is too deep to analyze";
537 }
538 if (to_stack == UNINITIALIZED) {
539 return "can't jump into an exception handler, or code may be unreachable";
540 }
541 Kind target_kind = top_of_stack(to_stack);
542 switch(target_kind) {
543 case Except:
544 return "can't jump into an 'except' block as there's no exception";
545 case Lasti:
546 return "can't jump into a re-raising block as there's no location";
547 case Object:
548 case Null:
549 return "incompatible stacks";
550 case Iterator:
551 return "can't jump into the body of a for loop";
552 default:
553 Py_UNREACHABLE();
554 }
555 }
556
557 static int *
558 marklines(PyCodeObject *code, int len)
559 {
560 PyCodeAddressRange bounds;
561 _PyCode_InitAddressRange(code, &bounds);
562 assert (bounds.ar_end == 0);
563 int last_line = -1;
564
565 int *linestarts = PyMem_New(int, len);
566 if (linestarts == NULL) {
567 return NULL;
568 }
569 for (int i = 0; i < len; i++) {
570 linestarts[i] = -1;
571 }
572
573 while (_PyLineTable_NextAddressRange(&bounds)) {
574 assert(bounds.ar_start / (int)sizeof(_Py_CODEUNIT) < len);
575 if (bounds.ar_line != last_line && bounds.ar_line != -1) {
576 linestarts[bounds.ar_start / sizeof(_Py_CODEUNIT)] = bounds.ar_line;
577 last_line = bounds.ar_line;
578 }
579 }
580 return linestarts;
581 }
582
583 static int
584 first_line_not_before(int *lines, int len, int line)
585 {
586 int result = INT_MAX;
587 for (int i = 0; i < len; i++) {
588 if (lines[i] < result && lines[i] >= line) {
589 result = lines[i];
590 }
591 }
592 if (result == INT_MAX) {
593 return -1;
594 }
595 return result;
596 }
597
598 static PyFrameState
599 _PyFrame_GetState(PyFrameObject *frame)
600 {
601 assert(!_PyFrame_IsIncomplete(frame->f_frame));
602 if (frame->f_frame->stacktop == 0) {
603 return FRAME_CLEARED;
604 }
605 switch(frame->f_frame->owner) {
606 case FRAME_OWNED_BY_GENERATOR:
607 {
608 PyGenObject *gen = _PyFrame_GetGenerator(frame->f_frame);
609 return gen->gi_frame_state;
610 }
611 case FRAME_OWNED_BY_THREAD:
612 {
613 if (_PyInterpreterFrame_LASTI(frame->f_frame) < 0) {
614 return FRAME_CREATED;
615 }
616 switch (_PyOpcode_Deopt[_Py_OPCODE(*frame->f_frame->prev_instr)])
617 {
618 case COPY_FREE_VARS:
619 case MAKE_CELL:
620 case RETURN_GENERATOR:
621 /* Frame not fully initialized */
622 return FRAME_CREATED;
623 default:
624 return FRAME_EXECUTING;
625 }
626 }
627 case FRAME_OWNED_BY_FRAME_OBJECT:
628 return FRAME_COMPLETED;
629 }
630 Py_UNREACHABLE();
631 }
632
633
634 /* Setter for f_lineno - you can set f_lineno from within a trace function in
635 * order to jump to a given line of code, subject to some restrictions. Most
636 * lines are OK to jump to because they don't make any assumptions about the
637 * state of the stack (obvious because you could remove the line and the code
638 * would still work without any stack errors), but there are some constructs
639 * that limit jumping:
640 *
641 * o Any exception handlers.
642 * o 'for' and 'async for' loops can't be jumped into because the
643 * iterator needs to be on the stack.
644 * o Jumps cannot be made from within a trace function invoked with a
645 * 'return' or 'exception' event since the eval loop has been exited at
646 * that time.
647 */
648 static int
649 frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignored))
650 {
651 if (p_new_lineno == NULL) {
652 PyErr_SetString(PyExc_AttributeError, "cannot delete attribute");
653 return -1;
654 }
655 /* f_lineno must be an integer. */
656 if (!PyLong_CheckExact(p_new_lineno)) {
657 PyErr_SetString(PyExc_ValueError,
658 "lineno must be an integer");
659 return -1;
660 }
661
662 PyFrameState state = _PyFrame_GetState(f);
663 /*
664 * This code preserves the historical restrictions on
665 * setting the line number of a frame.
666 * Jumps are forbidden on a 'return' trace event (except after a yield).
667 * Jumps from 'call' trace events are also forbidden.
668 * In addition, jumps are forbidden when not tracing,
669 * as this is a debugging feature.
670 */
671 switch(PyThreadState_GET()->tracing_what) {
672 case PyTrace_EXCEPTION:
673 PyErr_SetString(PyExc_ValueError,
674 "can only jump from a 'line' trace event");
675 return -1;
676 case PyTrace_CALL:
677 PyErr_Format(PyExc_ValueError,
678 "can't jump from the 'call' trace event of a new frame");
679 return -1;
680 case PyTrace_LINE:
681 break;
682 case PyTrace_RETURN:
683 if (state == FRAME_SUSPENDED) {
684 break;
685 }
686 /* fall through */
687 default:
688 PyErr_SetString(PyExc_ValueError,
689 "can only jump from a 'line' trace event");
690 return -1;
691 }
692 if (!f->f_trace) {
693 PyErr_Format(PyExc_ValueError,
694 "f_lineno can only be set by a trace function");
695 return -1;
696 }
697
698 int new_lineno;
699
    /* Fail if the line falls outside the code block; otherwise select the
       first line at or after it that has actual code. */
702 int overflow;
703 long l_new_lineno = PyLong_AsLongAndOverflow(p_new_lineno, &overflow);
704 if (overflow
705 #if SIZEOF_LONG > SIZEOF_INT
706 || l_new_lineno > INT_MAX
707 || l_new_lineno < INT_MIN
708 #endif
709 ) {
710 PyErr_SetString(PyExc_ValueError,
711 "lineno out of range");
712 return -1;
713 }
714 new_lineno = (int)l_new_lineno;
715
716 if (new_lineno < f->f_frame->f_code->co_firstlineno) {
717 PyErr_Format(PyExc_ValueError,
718 "line %d comes before the current code block",
719 new_lineno);
720 return -1;
721 }
722
723 /* PyCode_NewWithPosOnlyArgs limits co_code to be under INT_MAX so this
724 * should never overflow. */
725 int len = (int)Py_SIZE(f->f_frame->f_code);
726 int *lines = marklines(f->f_frame->f_code, len);
727 if (lines == NULL) {
728 return -1;
729 }
730
731 new_lineno = first_line_not_before(lines, len, new_lineno);
732 if (new_lineno < 0) {
733 PyErr_Format(PyExc_ValueError,
734 "line %d comes after the current code block",
735 (int)l_new_lineno);
736 PyMem_Free(lines);
737 return -1;
738 }
739
740 int64_t *stacks = mark_stacks(f->f_frame->f_code, len);
741 if (stacks == NULL) {
742 PyMem_Free(lines);
743 return -1;
744 }
745
746 int64_t best_stack = OVERFLOWED;
747 int best_addr = -1;
748 int64_t start_stack = stacks[_PyInterpreterFrame_LASTI(f->f_frame)];
749 int err = -1;
750 const char *msg = "cannot find bytecode for specified line";
751 for (int i = 0; i < len; i++) {
752 if (lines[i] == new_lineno) {
753 int64_t target_stack = stacks[i];
754 if (compatible_stack(start_stack, target_stack)) {
755 err = 0;
756 if (target_stack > best_stack) {
757 best_stack = target_stack;
758 best_addr = i;
759 }
760 }
761 else if (err < 0) {
762 if (start_stack == OVERFLOWED) {
763 msg = "stack to deep to analyze";
764 }
765 else if (start_stack == UNINITIALIZED) {
766 msg = "can't jump from unreachable code";
767 }
768 else {
769 msg = explain_incompatible_stack(target_stack);
770 err = 1;
771 }
772 }
773 }
774 }
775 PyMem_Free(stacks);
776 PyMem_Free(lines);
777 if (err) {
778 PyErr_SetString(PyExc_ValueError, msg);
779 return -1;
780 }
781 if (state == FRAME_SUSPENDED) {
782 /* Account for value popped by yield */
783 start_stack = pop_value(start_stack);
784 }
785 while (start_stack > best_stack) {
786 if (top_of_stack(start_stack) == Except) {
787 /* Pop exception stack as well as the evaluation stack */
788 PyThreadState *tstate = _PyThreadState_GET();
789 _PyErr_StackItem *exc_info = tstate->exc_info;
790 PyObject *value = exc_info->exc_value;
791 PyObject *exc = _PyFrame_StackPop(f->f_frame);
792 assert(PyExceptionInstance_Check(exc) || exc == Py_None);
793 exc_info->exc_value = exc;
794 Py_XDECREF(value);
795 }
796 else {
797 PyObject *v = _PyFrame_StackPop(f->f_frame);
798 Py_XDECREF(v);
799 }
800 start_stack = pop_value(start_stack);
801 }
802 /* Finally set the new lasti and return OK. */
803 f->f_lineno = 0;
804 f->f_frame->prev_instr = _PyCode_CODE(f->f_frame->f_code) + best_addr;
805 return 0;
806 }
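#if 0
/* Illustrative sketch (not part of the build): how a C debugger might use the
 * f_lineno setter above from a trace function installed with PyEval_SetTrace().
 * The function name and the target line 7 are example values only. */
static int
example_jump_trace(PyObject *obj, PyFrameObject *frame, int what, PyObject *arg)
{
    if (what == PyTrace_LINE) {
        PyObject *target = PyLong_FromLong(7);
        if (target == NULL) {
            return -1;
        }
        /* This ends up in frame_setlineno(); it raises ValueError if the jump
         * would land on an incompatible stack (see mark_stacks() above). */
        if (PyObject_SetAttrString((PyObject *)frame, "f_lineno", target) < 0) {
            PyErr_Clear();  /* jump refused; continue tracing normally */
        }
        Py_DECREF(target);
    }
    return 0;
}
#endif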
807
808 static PyObject *
809 frame_gettrace(PyFrameObject *f, void *closure)
810 {
811 PyObject* trace = f->f_trace;
812
813 if (trace == NULL)
814 trace = Py_None;
815
816 Py_INCREF(trace);
817
818 return trace;
819 }
820
821 static int
822 frame_settrace(PyFrameObject *f, PyObject* v, void *closure)
823 {
824 if (v == Py_None) {
825 v = NULL;
826 }
827 Py_XINCREF(v);
828 Py_XSETREF(f->f_trace, v);
829
830 return 0;
831 }
832
833
834 static PyGetSetDef frame_getsetlist[] = {
835 {"f_back", (getter)frame_getback, NULL, NULL},
836 {"f_locals", (getter)frame_getlocals, NULL, NULL},
837 {"f_lineno", (getter)frame_getlineno,
838 (setter)frame_setlineno, NULL},
839 {"f_trace", (getter)frame_gettrace, (setter)frame_settrace, NULL},
840 {"f_lasti", (getter)frame_getlasti, NULL, NULL},
841 {"f_globals", (getter)frame_getglobals, NULL, NULL},
842 {"f_builtins", (getter)frame_getbuiltins, NULL, NULL},
843 {"f_code", (getter)frame_getcode, NULL, NULL},
844 {0}
845 };
846
847 /* Stack frames are allocated and deallocated at a considerable rate.
848 In an attempt to improve the speed of function calls, we maintain
849 a separate free list of stack frames (just like floats are
850 allocated in a special way -- see floatobject.c). When a stack
851 frame is on the free list, only the following members have a meaning:
       ob_type             == &PyFrame_Type
853 f_back next item on free list, or NULL
854 */
855
856 static void
857 frame_dealloc(PyFrameObject *f)
858 {
859 /* It is the responsibility of the owning generator/coroutine
860 * to have cleared the generator pointer */
861
862 if (_PyObject_GC_IS_TRACKED(f)) {
863 _PyObject_GC_UNTRACK(f);
864 }
865
866 Py_TRASHCAN_BEGIN(f, frame_dealloc);
867 PyCodeObject *co = NULL;
868
869 /* GH-106092: If f->f_frame was on the stack and we reached the maximum
870 * nesting depth for deallocations, the trashcan may have delayed this
871 * deallocation until after f->f_frame is freed. Avoid dereferencing
872 * f->f_frame unless we know it still points to valid memory. */
873 _PyInterpreterFrame *frame = (_PyInterpreterFrame *)f->_f_frame_data;
874
875 /* Kill all local variables including specials, if we own them */
876 if (f->f_frame == frame && frame->owner == FRAME_OWNED_BY_FRAME_OBJECT) {
877 /* Don't clear code object until the end */
878 co = frame->f_code;
879 frame->f_code = NULL;
880 Py_CLEAR(frame->f_func);
881 Py_CLEAR(frame->f_locals);
882 PyObject **locals = _PyFrame_GetLocalsArray(frame);
883 for (int i = 0; i < frame->stacktop; i++) {
884 Py_CLEAR(locals[i]);
885 }
886 }
887 Py_CLEAR(f->f_back);
888 Py_CLEAR(f->f_trace);
889 PyObject_GC_Del(f);
890 Py_XDECREF(co);
891 Py_TRASHCAN_END;
892 }
893
894 static int
895 frame_traverse(PyFrameObject *f, visitproc visit, void *arg)
896 {
897 Py_VISIT(f->f_back);
898 Py_VISIT(f->f_trace);
899 if (f->f_frame->owner != FRAME_OWNED_BY_FRAME_OBJECT) {
900 return 0;
901 }
902 assert(f->f_frame->frame_obj == NULL);
903 return _PyFrame_Traverse(f->f_frame, visit, arg);
904 }
905
906 static int
907 frame_tp_clear(PyFrameObject *f)
908 {
909 Py_CLEAR(f->f_trace);
910
911 /* locals and stack */
912 PyObject **locals = _PyFrame_GetLocalsArray(f->f_frame);
913 assert(f->f_frame->stacktop >= 0);
914 for (int i = 0; i < f->f_frame->stacktop; i++) {
915 Py_CLEAR(locals[i]);
916 }
917 f->f_frame->stacktop = 0;
918 return 0;
919 }
920
921 static PyObject *
922 frame_clear(PyFrameObject *f, PyObject *Py_UNUSED(ignored))
923 {
924 if (f->f_frame->owner == FRAME_OWNED_BY_GENERATOR) {
925 PyGenObject *gen = _PyFrame_GetGenerator(f->f_frame);
926 if (gen->gi_frame_state == FRAME_EXECUTING) {
927 goto running;
928 }
929 _PyGen_Finalize((PyObject *)gen);
930 }
931 else if (f->f_frame->owner == FRAME_OWNED_BY_THREAD) {
932 goto running;
933 }
934 else {
935 assert(f->f_frame->owner == FRAME_OWNED_BY_FRAME_OBJECT);
936 (void)frame_tp_clear(f);
937 }
938 Py_RETURN_NONE;
939 running:
940 PyErr_SetString(PyExc_RuntimeError,
941 "cannot clear an executing frame");
942 return NULL;
943 }
944
945 PyDoc_STRVAR(clear__doc__,
946 "F.clear(): clear most references held by the frame");
947
948 static PyObject *
949 frame_sizeof(PyFrameObject *f, PyObject *Py_UNUSED(ignored))
950 {
951 Py_ssize_t res;
952 res = offsetof(PyFrameObject, _f_frame_data) + offsetof(_PyInterpreterFrame, localsplus);
953 PyCodeObject *code = f->f_frame->f_code;
954 res += (code->co_nlocalsplus+code->co_stacksize) * sizeof(PyObject *);
955 return PyLong_FromSsize_t(res);
956 }
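/* Illustrative example (hypothetical numbers): a code object with
 * co_nlocalsplus == 4 and co_stacksize == 6 reports the two fixed header
 * offsets plus 10 * sizeof(PyObject *) bytes. */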
957
958 PyDoc_STRVAR(sizeof__doc__,
959 "F.__sizeof__() -> size of F in memory, in bytes");
960
961 static PyObject *
962 frame_repr(PyFrameObject *f)
963 {
964 int lineno = PyFrame_GetLineNumber(f);
965 PyCodeObject *code = f->f_frame->f_code;
966 return PyUnicode_FromFormat(
967 "<frame at %p, file %R, line %d, code %S>",
968 f, code->co_filename, lineno, code->co_name);
969 }
970
971 static PyMethodDef frame_methods[] = {
972 {"clear", (PyCFunction)frame_clear, METH_NOARGS,
973 clear__doc__},
974 {"__sizeof__", (PyCFunction)frame_sizeof, METH_NOARGS,
975 sizeof__doc__},
976 {NULL, NULL} /* sentinel */
977 };
978
979 PyTypeObject PyFrame_Type = {
980 PyVarObject_HEAD_INIT(&PyType_Type, 0)
981 "frame",
982 offsetof(PyFrameObject, _f_frame_data) +
983 offsetof(_PyInterpreterFrame, localsplus),
984 sizeof(PyObject *),
985 (destructor)frame_dealloc, /* tp_dealloc */
986 0, /* tp_vectorcall_offset */
987 0, /* tp_getattr */
988 0, /* tp_setattr */
989 0, /* tp_as_async */
990 (reprfunc)frame_repr, /* tp_repr */
991 0, /* tp_as_number */
992 0, /* tp_as_sequence */
993 0, /* tp_as_mapping */
994 0, /* tp_hash */
995 0, /* tp_call */
996 0, /* tp_str */
997 PyObject_GenericGetAttr, /* tp_getattro */
998 PyObject_GenericSetAttr, /* tp_setattro */
999 0, /* tp_as_buffer */
1000 Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
1001 0, /* tp_doc */
1002 (traverseproc)frame_traverse, /* tp_traverse */
1003 (inquiry)frame_tp_clear, /* tp_clear */
1004 0, /* tp_richcompare */
1005 0, /* tp_weaklistoffset */
1006 0, /* tp_iter */
1007 0, /* tp_iternext */
1008 frame_methods, /* tp_methods */
1009 frame_memberlist, /* tp_members */
1010 frame_getsetlist, /* tp_getset */
1011 0, /* tp_base */
1012 0, /* tp_dict */
1013 };
1014
1015 static void
1016 init_frame(_PyInterpreterFrame *frame, PyFunctionObject *func, PyObject *locals)
1017 {
1018 /* _PyFrame_InitializeSpecials consumes reference to func */
1019 Py_INCREF(func);
1020 PyCodeObject *code = (PyCodeObject *)func->func_code;
1021 _PyFrame_InitializeSpecials(frame, func, locals, code->co_nlocalsplus);
1022 frame->previous = NULL;
1023 for (Py_ssize_t i = 0; i < code->co_nlocalsplus; i++) {
1024 frame->localsplus[i] = NULL;
1025 }
1026 }
1027
1028 PyFrameObject*
1029 _PyFrame_New_NoTrack(PyCodeObject *code)
1030 {
1031 CALL_STAT_INC(frame_objects_created);
1032 int slots = code->co_nlocalsplus + code->co_stacksize;
1033 PyFrameObject *f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, slots);
1034 if (f == NULL) {
1035 return NULL;
1036 }
1037 f->f_back = NULL;
1038 f->f_trace = NULL;
1039 f->f_trace_lines = 1;
1040 f->f_trace_opcodes = 0;
1041 f->f_fast_as_locals = 0;
1042 f->f_lineno = 0;
1043 return f;
1044 }
1045
1046 /* Legacy API */
1047 PyFrameObject*
1048 PyFrame_New(PyThreadState *tstate, PyCodeObject *code,
1049 PyObject *globals, PyObject *locals)
1050 {
1051 PyObject *builtins = _PyEval_BuiltinsFromGlobals(tstate, globals); // borrowed ref
1052 if (builtins == NULL) {
1053 return NULL;
1054 }
1055 PyFrameConstructor desc = {
1056 .fc_globals = globals,
1057 .fc_builtins = builtins,
1058 .fc_name = code->co_name,
1059 .fc_qualname = code->co_name,
1060 .fc_code = (PyObject *)code,
1061 .fc_defaults = NULL,
1062 .fc_kwdefaults = NULL,
1063 .fc_closure = NULL
1064 };
1065 PyFunctionObject *func = _PyFunction_FromConstructor(&desc);
1066 if (func == NULL) {
1067 return NULL;
1068 }
1069 PyFrameObject *f = _PyFrame_New_NoTrack(code);
1070 if (f == NULL) {
1071 Py_DECREF(func);
1072 return NULL;
1073 }
1074 init_frame((_PyInterpreterFrame *)f->_f_frame_data, func, locals);
1075 f->f_frame = (_PyInterpreterFrame *)f->_f_frame_data;
1076 f->f_frame->owner = FRAME_OWNED_BY_FRAME_OBJECT;
1077 // This frame needs to be "complete", so pretend that the first RESUME ran:
1078 f->f_frame->prev_instr = _PyCode_CODE(code) + code->_co_firsttraceable;
1079 assert(!_PyFrame_IsIncomplete(f->f_frame));
1080 Py_DECREF(func);
1081 _PyObject_GC_TRACK(f);
1082 return f;
1083 }
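#if 0
/* A minimal sketch (not compiled) of how embedding code might call the legacy
 * PyFrame_New() API.  All names below are local to this example. */
static PyFrameObject *
example_make_frame(PyThreadState *tstate)
{
    PyObject *globals = PyDict_New();
    if (globals == NULL) {
        return NULL;
    }
    PyCodeObject *code = (PyCodeObject *)Py_CompileString(
        "pass", "<example>", Py_file_input);
    if (code == NULL) {
        Py_DECREF(globals);
        return NULL;
    }
    /* locals may be NULL; the resulting frame is already "complete". */
    PyFrameObject *frame = PyFrame_New(tstate, code, globals, NULL);
    Py_DECREF(code);
    Py_DECREF(globals);
    return frame;  /* new reference, or NULL with an exception set */
}
#endif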
1084
1085 static int
1086 _PyFrame_OpAlreadyRan(_PyInterpreterFrame *frame, int opcode, int oparg)
1087 {
1088 // This only works when opcode is a non-quickened form:
1089 assert(_PyOpcode_Deopt[opcode] == opcode);
1090 int check_oparg = 0;
1091 for (_Py_CODEUNIT *instruction = _PyCode_CODE(frame->f_code);
1092 instruction < frame->prev_instr; instruction++)
1093 {
1094 int check_opcode = _PyOpcode_Deopt[_Py_OPCODE(*instruction)];
1095 check_oparg |= _Py_OPARG(*instruction);
1096 if (check_opcode == opcode && check_oparg == oparg) {
1097 return 1;
1098 }
1099 if (check_opcode == EXTENDED_ARG) {
1100 check_oparg <<= 8;
1101 }
1102 else {
1103 check_oparg = 0;
1104 }
1105 instruction += _PyOpcode_Caches[check_opcode];
1106 }
1107 return 0;
1108 }
1109
1110 int
1111 _PyFrame_FastToLocalsWithError(_PyInterpreterFrame *frame) {
    /* Merge fast locals into frame->f_locals */
1113 PyObject *locals;
1114 PyObject **fast;
1115 PyCodeObject *co;
1116 locals = frame->f_locals;
1117 if (locals == NULL) {
1118 locals = frame->f_locals = PyDict_New();
1119 if (locals == NULL)
1120 return -1;
1121 }
1122 co = frame->f_code;
1123 fast = _PyFrame_GetLocalsArray(frame);
1124 // COPY_FREE_VARS has no quickened forms, so no need to use _PyOpcode_Deopt
1125 // here:
1126 int lasti = _PyInterpreterFrame_LASTI(frame);
1127 if (lasti < 0 && _Py_OPCODE(_PyCode_CODE(co)[0]) == COPY_FREE_VARS) {
1128 /* Free vars have not been initialized -- Do that */
1129 PyCodeObject *co = frame->f_code;
1130 PyObject *closure = frame->f_func->func_closure;
1131 int offset = co->co_nlocals + co->co_nplaincellvars;
1132 for (int i = 0; i < co->co_nfreevars; ++i) {
1133 PyObject *o = PyTuple_GET_ITEM(closure, i);
1134 Py_INCREF(o);
1135 frame->localsplus[offset + i] = o;
1136 }
1137 // COPY_FREE_VARS doesn't have inline CACHEs, either:
1138 frame->prev_instr = _PyCode_CODE(frame->f_code);
1139 }
1140 for (int i = 0; i < co->co_nlocalsplus; i++) {
1141 _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);
1142
1143 /* If the namespace is unoptimized, then one of the
1144 following cases applies:
1145 1. It does not contain free variables, because it
1146 uses import * or is a top-level namespace.
1147 2. It is a class namespace.
1148 We don't want to accidentally copy free variables
1149 into the locals dict used by the class.
1150 */
1151 if (kind & CO_FAST_FREE && !(co->co_flags & CO_OPTIMIZED)) {
1152 continue;
1153 }
1154
1155 PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
1156 PyObject *value = fast[i];
1157 if (frame->stacktop) {
1158 if (kind & CO_FAST_FREE) {
1159 // The cell was set by COPY_FREE_VARS.
1160 assert(value != NULL && PyCell_Check(value));
1161 value = PyCell_GET(value);
1162 }
1163 else if (kind & CO_FAST_CELL) {
1164 // Note that no *_DEREF ops can happen before MAKE_CELL
1165 // executes. So there's no need to duplicate the work
1166 // that MAKE_CELL would otherwise do later, if it hasn't
1167 // run yet.
1168 if (value != NULL) {
1169 if (PyCell_Check(value) &&
1170 _PyFrame_OpAlreadyRan(frame, MAKE_CELL, i)) {
1171 // (likely) MAKE_CELL must have executed already.
1172 value = PyCell_GET(value);
1173 }
                // (likely) Otherwise it is an arg (kind & CO_FAST_LOCAL),
1175 // with the initial value set when the frame was created...
1176 // (unlikely) ...or it was set to some initial value by
1177 // an earlier call to PyFrame_LocalsToFast().
1178 }
1179 }
1180 }
1181 else {
1182 assert(value == NULL);
1183 }
1184 if (value == NULL) {
1185 if (PyObject_DelItem(locals, name) != 0) {
1186 if (PyErr_ExceptionMatches(PyExc_KeyError)) {
1187 PyErr_Clear();
1188 }
1189 else {
1190 return -1;
1191 }
1192 }
1193 }
1194 else {
1195 if (PyObject_SetItem(locals, name, value) != 0) {
1196 return -1;
1197 }
1198 }
1199 }
1200 return 0;
1201 }
1202
1203 int
1204 PyFrame_FastToLocalsWithError(PyFrameObject *f)
1205 {
    if (f == NULL) {
        PyErr_BadInternalCall();
        return -1;
    }
    assert(!_PyFrame_IsIncomplete(f->f_frame));
1211 int err = _PyFrame_FastToLocalsWithError(f->f_frame);
1212 if (err == 0) {
1213 f->f_fast_as_locals = 1;
1214 }
1215 return err;
1216 }
1217
1218 void
1219 PyFrame_FastToLocals(PyFrameObject *f)
1220 {
1221 int res;
1222 assert(!_PyFrame_IsIncomplete(f->f_frame));
1223 assert(!PyErr_Occurred());
1224
1225 res = PyFrame_FastToLocalsWithError(f);
1226 if (res < 0)
1227 PyErr_Clear();
1228 }
1229
1230 void
1231 _PyFrame_LocalsToFast(_PyInterpreterFrame *frame, int clear)
1232 {
1233 /* Merge locals into fast locals */
1234 PyObject *locals;
1235 PyObject **fast;
1236 PyObject *error_type, *error_value, *error_traceback;
1237 PyCodeObject *co;
1238 locals = frame->f_locals;
1239 if (locals == NULL) {
1240 return;
1241 }
1242 fast = _PyFrame_GetLocalsArray(frame);
1243 co = frame->f_code;
1244
1245 PyErr_Fetch(&error_type, &error_value, &error_traceback);
1246 for (int i = 0; i < co->co_nlocalsplus; i++) {
1247 _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);
1248
1249 /* Same test as in PyFrame_FastToLocals() above. */
1250 if (kind & CO_FAST_FREE && !(co->co_flags & CO_OPTIMIZED)) {
1251 continue;
1252 }
1253 PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
1254 PyObject *value = PyObject_GetItem(locals, name);
1255 /* We only care about NULLs if clear is true. */
1256 if (value == NULL) {
1257 PyErr_Clear();
1258 if (!clear) {
1259 continue;
1260 }
1261 }
1262 PyObject *oldvalue = fast[i];
1263 PyObject *cell = NULL;
1264 if (kind == CO_FAST_FREE) {
1265 // The cell was set when the frame was created from
1266 // the function's closure.
1267 assert(oldvalue != NULL && PyCell_Check(oldvalue));
1268 cell = oldvalue;
1269 }
1270 else if (kind & CO_FAST_CELL && oldvalue != NULL) {
1271 /* Same test as in PyFrame_FastToLocals() above. */
1272 if (PyCell_Check(oldvalue) &&
1273 _PyFrame_OpAlreadyRan(frame, MAKE_CELL, i)) {
1274 // (likely) MAKE_CELL must have executed already.
1275 cell = oldvalue;
1276 }
1277 // (unlikely) Otherwise, it must have been set to some
1278 // initial value by an earlier call to PyFrame_LocalsToFast().
1279 }
1280 if (cell != NULL) {
1281 oldvalue = PyCell_GET(cell);
1282 if (value != oldvalue) {
1283 Py_XDECREF(oldvalue);
1284 Py_XINCREF(value);
1285 PyCell_SET(cell, value);
1286 }
1287 }
1288 else if (value != oldvalue) {
1289 Py_XINCREF(value);
1290 Py_XSETREF(fast[i], value);
1291 }
1292 Py_XDECREF(value);
1293 }
1294 PyErr_Restore(error_type, error_value, error_traceback);
1295 }
1296
1297 void
1298 PyFrame_LocalsToFast(PyFrameObject *f, int clear)
1299 {
    if (f && f->f_fast_as_locals && _PyFrame_GetState(f) != FRAME_CLEARED) {
        assert(!_PyFrame_IsIncomplete(f->f_frame));
        _PyFrame_LocalsToFast(f->f_frame, clear);
        f->f_fast_as_locals = 0;
    }
1305 }
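#if 0
/* A minimal sketch (not compiled) of the classic debugger round trip through
 * the two conversions above.  The variable name "x" is an example only, and
 * the code assumes f_locals is a plain dict. */
static int
example_overwrite_local(PyFrameObject *frame, PyObject *new_value)
{
    if (PyFrame_FastToLocalsWithError(frame) < 0) {
        return -1;
    }
    PyObject *locals = PyFrame_GetLocals(frame);  /* new reference */
    if (locals == NULL) {
        return -1;
    }
    int err = PyDict_SetItemString(locals, "x", new_value);
    Py_DECREF(locals);
    if (err < 0) {
        return -1;
    }
    /* Write the edited mapping back into the fast-locals array. */
    PyFrame_LocalsToFast(frame, 0);
    return 0;
}
#endif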
1306
1307
1308 int _PyFrame_IsEntryFrame(PyFrameObject *frame)
1309 {
1310 assert(frame != NULL);
1311 assert(!_PyFrame_IsIncomplete(frame->f_frame));
1312 return frame->f_frame->is_entry;
1313 }
1314
1315
1316 PyCodeObject *
1317 PyFrame_GetCode(PyFrameObject *frame)
1318 {
1319 assert(frame != NULL);
1320 assert(!_PyFrame_IsIncomplete(frame->f_frame));
1321 PyCodeObject *code = frame->f_frame->f_code;
1322 assert(code != NULL);
1323 Py_INCREF(code);
1324 return code;
1325 }
1326
1327
1328 PyFrameObject*
1329 PyFrame_GetBack(PyFrameObject *frame)
1330 {
1331 assert(frame != NULL);
1332 assert(!_PyFrame_IsIncomplete(frame->f_frame));
1333 PyFrameObject *back = frame->f_back;
1334 if (back == NULL) {
1335 _PyInterpreterFrame *prev = frame->f_frame->previous;
1336 while (prev && _PyFrame_IsIncomplete(prev)) {
1337 prev = prev->previous;
1338 }
1339 if (prev) {
1340 back = _PyFrame_GetFrameObject(prev);
1341 }
1342 }
1343 Py_XINCREF(back);
1344 return back;
1345 }
1346
1347 PyObject*
1348 PyFrame_GetLocals(PyFrameObject *frame)
1349 {
1350 assert(!_PyFrame_IsIncomplete(frame->f_frame));
1351 return frame_getlocals(frame, NULL);
1352 }
1353
1354 PyObject*
1355 PyFrame_GetGlobals(PyFrameObject *frame)
1356 {
1357 assert(!_PyFrame_IsIncomplete(frame->f_frame));
1358 return frame_getglobals(frame, NULL);
1359 }
1360
1361 PyObject*
1362 PyFrame_GetBuiltins(PyFrameObject *frame)
1363 {
1364 assert(!_PyFrame_IsIncomplete(frame->f_frame));
1365 return frame_getbuiltins(frame, NULL);
1366 }
1367
1368 int
1369 PyFrame_GetLasti(PyFrameObject *frame)
1370 {
1371 assert(!_PyFrame_IsIncomplete(frame->f_frame));
1372 int lasti = _PyInterpreterFrame_LASTI(frame->f_frame);
1373 if (lasti < 0) {
1374 return -1;
1375 }
1376 return lasti * sizeof(_Py_CODEUNIT);
1377 }
1378
1379 PyObject *
1380 PyFrame_GetGenerator(PyFrameObject *frame)
1381 {
1382 assert(!_PyFrame_IsIncomplete(frame->f_frame));
1383 if (frame->f_frame->owner != FRAME_OWNED_BY_GENERATOR) {
1384 return NULL;
1385 }
1386 PyGenObject *gen = _PyFrame_GetGenerator(frame->f_frame);
1387 return Py_NewRef(gen);
1388 }
1389
1390 PyObject*
1391 _PyEval_BuiltinsFromGlobals(PyThreadState *tstate, PyObject *globals)
1392 {
1393 PyObject *builtins = PyDict_GetItemWithError(globals, &_Py_ID(__builtins__));
1394 if (builtins) {
1395 if (PyModule_Check(builtins)) {
1396 builtins = _PyModule_GetDict(builtins);
1397 assert(builtins != NULL);
1398 }
1399 return builtins;
1400 }
1401 if (PyErr_Occurred()) {
1402 return NULL;
1403 }
1404
1405 return _PyEval_GetBuiltins(tstate);
1406 }
1407
1408