@@ -7061,7 +7061,6 @@ struct assembler {
     PyObject *a_except_table;  /* bytes containing exception table */
     basicblock *a_entry;
     int a_offset;              /* offset into bytecode */
-    int a_nblocks;             /* number of reachable blocks */
     int a_except_table_off;    /* offset into exception table */
     int a_prevlineno;          /* lineno of last emitted line in line table */
     int a_prev_end_lineno;     /* end_lineno of last emitted line in line table */
@@ -7074,6 +7073,20 @@ struct assembler {
     int a_location_off;        /* offset of last written location info frame */
 };
 
+static basicblock**
+make_cfg_traversal_stack(basicblock *entry) {
+    int nblocks = 0;
+    for (basicblock *b = entry; b != NULL; b = b->b_next) {
+        b->b_visited = 0;
+        nblocks++;
+    }
+    basicblock **stack = (basicblock **)PyMem_Malloc(sizeof(basicblock *) * nblocks);
+    if (!stack) {
+        PyErr_NoMemory();
+    }
+    return stack;
+}
+
 Py_LOCAL_INLINE(void)
 stackdepth_push(basicblock ***sp, basicblock *b, int depth)
 {
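
For context, a minimal sketch of the call pattern the new helper expects, mirroring what the hunks below adopt in stackdepth() and label_exception_targets(). The function name visit_all_blocks is hypothetical and used only for illustration; make_cfg_traversal_stack, PyMem_Free and the basicblock fields are the ones from this diff.

static int
visit_all_blocks(basicblock *entry)
{
    /* The helper clears b_visited on every block, sizes the worklist to the
       number of blocks, and returns NULL with PyErr_NoMemory() already set
       on allocation failure. */
    basicblock **stack = make_cfg_traversal_stack(entry);
    if (stack == NULL) {
        return -1;
    }
    basicblock **sp = stack;
    *sp++ = entry;
    entry->b_visited = 1;
    while (sp > stack) {
        basicblock *b = *--sp;
        /* ... process b and push any unvisited successors ... */
        (void)b;
    }
    /* The caller owns the worklist and releases it with PyMem_Free. */
    PyMem_Free(stack);
    return 0;
}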
@@ -7089,31 +7102,26 @@ stackdepth_push(basicblock ***sp, basicblock *b, int depth)
  * cycles in the flow graph have no net effect on the stack depth.
  */
 static int
-stackdepth(struct compiler *c)
+stackdepth(struct compiler *c, basicblock *entry)
 {
-    basicblock *b, *entryblock = NULL;
-    basicblock **stack, **sp;
-    int nblocks = 0, maxdepth = 0;
-    for (b = c->u->u_blocks; b != NULL; b = b->b_list) {
+    for (basicblock *b = entry; b != NULL; b = b->b_next) {
         b->b_startdepth = INT_MIN;
-        entryblock = b;
-        nblocks++;
     }
-    assert(entryblock != NULL);
-    stack = (basicblock **)PyObject_Malloc(sizeof(basicblock *) * nblocks);
+    basicblock **stack = make_cfg_traversal_stack(entry);
     if (!stack) {
-        PyErr_NoMemory();
         return -1;
     }
 
-    sp = stack;
+    int maxdepth = 0;
+    basicblock **sp = stack;
     if (c->u->u_ste->ste_generator || c->u->u_ste->ste_coroutine) {
-        stackdepth_push(&sp, entryblock, 1);
+        stackdepth_push(&sp, entry, 1);
     } else {
-        stackdepth_push(&sp, entryblock, 0);
+        stackdepth_push(&sp, entry, 0);
     }
+
     while (sp != stack) {
-        b = *--sp;
+        basicblock *b = *--sp;
         int depth = b->b_startdepth;
         assert(depth >= 0);
         basicblock *next = b->b_next;
@@ -7159,7 +7167,7 @@ stackdepth(struct compiler *c)
             stackdepth_push(&sp, next, depth);
         }
     }
-    PyObject_Free(stack);
+    PyMem_Free(stack);
     return maxdepth;
 }
 
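
For readers who want the algorithm outside of compile.c, here is a self-contained sketch of the worklist computation that stackdepth() performs, with each block reduced to a single net stack effect. It is an illustration under simplified assumptions (every path reaches a block at the same depth, as CPython bytecode guarantees), not CPython's code.

#include <limits.h>
#include <stdio.h>
#include <stdlib.h>

struct block {
    int start_depth;     /* INT_MIN until the block is first reached */
    int effect;          /* net stack effect of the block's instructions */
    struct block *next;  /* fall-through successor, may be NULL */
    struct block *jump;  /* jump target, may be NULL */
};

/* Record the depth at which a block is first reached and queue it. */
static void push(struct block ***sp, struct block *b, int depth, int *maxdepth) {
    if (b->start_depth == INT_MIN) {   /* each block is pushed at most once */
        b->start_depth = depth;
        if (depth > *maxdepth) {
            *maxdepth = depth;
        }
        *(*sp)++ = b;
    }
}

static int max_stack_depth(struct block *blocks, int nblocks, struct block *entry) {
    for (int i = 0; i < nblocks; i++) {
        blocks[i].start_depth = INT_MIN;
    }
    struct block **stack = malloc(sizeof(*stack) * nblocks);
    if (stack == NULL) {
        return -1;
    }
    struct block **sp = stack;
    int maxdepth = 0;
    push(&sp, entry, 0, &maxdepth);
    while (sp != stack) {
        struct block *b = *--sp;
        int depth = b->start_depth + b->effect;   /* depth when the block ends */
        if (depth > maxdepth) {
            maxdepth = depth;
        }
        if (b->next != NULL) {
            push(&sp, b->next, depth, &maxdepth);
        }
        if (b->jump != NULL) {
            push(&sp, b->jump, depth, &maxdepth);
        }
    }
    free(stack);
    return maxdepth;
}

int main(void) {
    struct block b[3] = {
        { 0, +2, &b[1], &b[2] },  /* entry pushes two values */
        { 0, -1, &b[2], NULL  },  /* one branch pops one of them */
        { 0, -1, NULL,  NULL  },  /* exit block */
    };
    printf("max depth: %d\n", max_stack_depth(b, 3, &b[0]));  /* prints 2 */
    return 0;
}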
@@ -7264,14 +7272,8 @@ copy_except_stack(ExceptStack *stack) {
 
 static int
 label_exception_targets(basicblock *entry) {
-    int nblocks = 0;
-    for (basicblock *b = entry; b != NULL; b = b->b_next) {
-        b->b_visited = 0;
-        nblocks++;
-    }
-    basicblock **todo_stack = PyMem_Malloc(sizeof(basicblock *) * nblocks);
+    basicblock **todo_stack = make_cfg_traversal_stack(entry);
     if (todo_stack == NULL) {
-        PyErr_NoMemory();
         return -1;
     }
     ExceptStack *except_stack = make_except_stack();
@@ -8051,7 +8053,7 @@ static int
 optimize_cfg(struct compiler *c, struct assembler *a, PyObject *consts);
 
 static int
-trim_unused_consts(struct compiler *c, struct assembler *a, PyObject *consts);
+trim_unused_consts(struct assembler *a, PyObject *consts);
 
 /* Duplicates exit BBs, so that line numbers can be propagated to them */
 static int
@@ -8347,7 +8349,6 @@ assemble(struct compiler *c, int addNone)
     if (!assemble_init(&a, nblocks, c->u->u_firstlineno))
         goto error;
     a.a_entry = entryblock;
-    a.a_nblocks = nblocks;
 
     int numdropped = fix_cell_offsets(c, entryblock, cellfixedoffsets);
     PyMem_Free(cellfixedoffsets); // At this point we're done with it.
@@ -8368,12 +8369,12 @@ assemble(struct compiler *c, int addNone)
     if (duplicate_exits_without_lineno(c)) {
         return NULL;
     }
-    if (trim_unused_consts(c, &a, consts)) {
+    if (trim_unused_consts(&a, consts)) {
         goto error;
     }
     propagate_line_numbers(&a);
     guarantee_lineno_for_exits(&a, c->u->u_firstlineno);
-    int maxdepth = stackdepth(c);
+    int maxdepth = stackdepth(c, entryblock);
     if (maxdepth < 0) {
         goto error;
     }
@@ -9081,17 +9082,19 @@ normalize_basic_block(basicblock *bb) {
 
 static int
 mark_reachable(struct assembler *a) {
-    basicblock **stack, **sp;
-    sp = stack = (basicblock **)PyObject_Malloc(sizeof(basicblock *) * a->a_nblocks);
+    basicblock **stack = make_cfg_traversal_stack(a->a_entry);
     if (stack == NULL) {
         return -1;
     }
+    basicblock **sp = stack;
     a->a_entry->b_predecessors = 1;
     *sp++ = a->a_entry;
     while (sp > stack) {
         basicblock *b = *(--sp);
+        b->b_visited = 1;
         if (b->b_next && !b->b_nofallthrough) {
-            if (b->b_next->b_predecessors == 0) {
+            if (!b->b_next->b_visited) {
+                assert(b->b_next->b_predecessors == 0);
                 *sp++ = b->b_next;
             }
             b->b_next->b_predecessors++;
@@ -9101,14 +9104,15 @@ mark_reachable(struct assembler *a) {
             struct instr *instr = &b->b_instr[i];
             if (is_jump(instr) || is_block_push(instr)) {
                 target = instr->i_target;
-                if (target->b_predecessors == 0) {
+                if (!target->b_visited) {
+                    assert(target->b_predecessors == 0 || target == b->b_next);
                     *sp++ = target;
                 }
                 target->b_predecessors++;
             }
         }
     }
-    PyObject_Free(stack);
+    PyMem_Free(stack);
     return 0;
 }
 
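
A standalone sketch (again not CPython's code) of the separation these two hunks introduce in mark_reachable(): a visited flag alone decides whether a block still needs to go on the worklist, while the predecessor counter is bumped once per incoming edge. A block that is both the fall-through successor and a jump target of the same predecessor therefore ends with a count of 2, which is exactly the case the new assert allows. As a simplification, the flag here is set when a block is pushed rather than when it is popped.

#include <stdio.h>

struct node {
    int visited;            /* has the node been queued already? */
    int predecessors;       /* exact count of incoming edges */
    struct node *next;      /* fall-through edge, may be NULL */
    struct node *jump;      /* jump edge, may be NULL */
};

/* Count the edge and queue the target if it has not been seen yet. */
static void mark_edge(struct node *target, struct node ***sp) {
    if (!target->visited) {
        target->visited = 1;
        *(*sp)++ = target;
    }
    target->predecessors++;
}

int main(void) {
    /* entry falls through to exit_b and also jumps to it. */
    struct node exit_b = { 0, 0, NULL, NULL };
    struct node entry  = { 0, 0, &exit_b, &exit_b };
    struct node *stack[2];
    struct node **sp = stack;
    entry.visited = 1;
    entry.predecessors = 1;    /* the entry block is reachable by definition */
    *sp++ = &entry;
    while (sp > stack) {
        struct node *b = *--sp;
        if (b->next != NULL) {
            mark_edge(b->next, &sp);
        }
        if (b->jump != NULL) {
            mark_edge(b->jump, &sp);
        }
    }
    printf("exit predecessors: %d\n", exit_b.predecessors);  /* prints 2 */
    return 0;
}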
@@ -9128,12 +9132,15 @@ eliminate_empty_basic_blocks(basicblock *entry) {
         if (b->b_iused == 0) {
             continue;
         }
-        if (is_jump(&b->b_instr[b->b_iused - 1])) {
-            basicblock *target = b->b_instr[b->b_iused - 1].i_target;
-            while (target->b_iused == 0) {
-                target = target->b_next;
+        for (int i = 0; i < b->b_iused; i++) {
+            struct instr *instr = &b->b_instr[i];
+            if (is_jump(instr) || is_block_push(instr)) {
+                basicblock *target = instr->i_target;
+                while (target->b_iused == 0) {
+                    target = target->b_next;
+                }
+                instr->i_target = target;
             }
-            b->b_instr[b->b_iused - 1].i_target = target;
         }
     }
 }
@@ -9253,7 +9260,7 @@ optimize_cfg(struct compiler *c, struct assembler *a, PyObject *consts)
 
 // Remove trailing unused constants.
 static int
-trim_unused_consts(struct compiler *c, struct assembler *a, PyObject *consts)
+trim_unused_consts(struct assembler *a, PyObject *consts)
 {
     assert(PyList_CheckExact(consts));
 