@@ -42,6 +42,12 @@ typedef struct {
42
42
uint16_t index ;
43
43
} _PyAdaptiveEntry ;
44
44
45
+
46
/* Cache entry for a specialized LOAD_ATTR instruction.
 * Layout must respect the SpecializedCacheEntry invariant
 * (sizeof(SpecializedCacheEntry) == 8, per the comment above the union). */
typedef struct {
    uint32_t tp_version;         /* presumably the owner type's tp_version_tag
                                  * captured at specialization time -- confirm
                                  * against the specializer. */
    uint32_t dk_version_or_hint; /* dict-keys version, or an index hint;
                                  * NOTE(review): which one depends on the
                                  * specialization kind -- verify in specialize.c. */
} _PyLoadAttrCache;
50
+
45
51
/* Add specialized versions of entries to this union.
46
52
*
47
53
* Do not break the invariant: sizeof(SpecializedCacheEntry) == 8
@@ -55,6 +61,7 @@ typedef struct {
55
61
typedef union {
    _PyEntryZero zero;           /* unused/zeroed entry (type defined above, not shown here) */
    _PyAdaptiveEntry adaptive;   /* counter + operand for adaptive instructions */
    _PyLoadAttrCache load_attr;  /* version tags for a specialized LOAD_ATTR */
} SpecializedCacheEntry;
59
66
60
67
/* How many code units one cache entry spans
 * (cache entries and instructions share the same array). */
#define INSTRUCTIONS_PER_ENTRY (sizeof(SpecializedCacheEntry)/sizeof(_Py_CODEUNIT))
@@ -255,6 +262,83 @@ PyAPI_FUNC(PyObject *) _PyCode_GetCellvars(PyCodeObject *);
255
262
/* Accessor for a code object's free variable names (internal C API).
 * NOTE(review): ownership of the returned object is not visible in this
 * header -- confirm (likely a new reference) against codeobject.c. */
PyAPI_FUNC(PyObject *) _PyCode_GetFreevars(PyCodeObject *);
256
263
257
264
265
+ /* Cache hits and misses */
266
+
267
/* Record one cache hit in the shift-register counter: the history is
 * shifted up one bit (a zero enters at the bottom, the oldest bit is
 * discarded by the narrowing conversion to uint8_t). */
static inline uint8_t
saturating_increment(uint8_t c)
{
    return (uint8_t)(c * 2);
}
272
+
273
/* Record one cache miss: shift the history down one bit and set the top
 * bit.  OR-ing 0x80 is equivalent to the additive form (c>>1) + 128,
 * because c>>1 can never have bit 7 set. */
static inline uint8_t
saturating_decrement(uint8_t c)
{
    return (uint8_t)(0x80u | (c >> 1));
}
278
+
279
/* The fully saturated counter value ("all misses"); reaching it is the
 * trigger tested by too_many_cache_misses(). */
static inline uint8_t
saturating_zero(void)
{
    return UINT8_MAX;
}
284
+
285
/* Starting value for the saturating counter.
 * Technically this should be 1, but that is likely to
 * cause a bit of thrashing when we optimize then get an immediate miss.
 * We want to give the counter a chance to stabilize, so we start at 3.
 * (255 << 3 narrowed to uint8_t is 0xF8: the three low bits are clear,
 * so three consecutive misses are needed before the counter saturates.)
 */
static inline uint8_t
saturating_start(void)
{
    return saturating_zero() << 3;
}
295
+
296
+ static inline void
297
+ record_cache_hit (_PyAdaptiveEntry * entry ) {
298
+ entry -> counter = saturating_increment (entry -> counter );
299
+ }
300
+
301
+ static inline void
302
+ record_cache_miss (_PyAdaptiveEntry * entry ) {
303
+ entry -> counter = saturating_decrement (entry -> counter );
304
+ }
305
+
306
+ static inline int
307
+ too_many_cache_misses (_PyAdaptiveEntry * entry ) {
308
+ return entry -> counter == saturating_zero ();
309
+ }
310
+
311
+ #define BACKOFF 64
312
+
313
+ static inline void
314
+ cache_backoff (_PyAdaptiveEntry * entry ) {
315
+ entry -> counter = BACKOFF ;
316
+ }
317
+
318
/* Specialization functions */

/* Attempt to specialize a LOAD_ATTR instruction at `instr` for the given
 * `owner` object and attribute `name`, recording results in `cache`.
 * NOTE(review): the return-value convention (success/failure code) is not
 * visible in this header -- confirm against specialize.c. */
int _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, SpecializedCacheEntry *cache);
321
+
322
/* Set to 1 to compile in global counters for specialization events.
 * When 0 (the default), STAT_INC() expands to a no-op. */
#define SPECIALIZATION_STATS 0
#if SPECIALIZATION_STATS

/* Counters for specialization outcomes and LOAD_ATTR cache behavior.
 * NOTE(review): precise increment sites for each field live in the
 * interpreter/specializer sources, not in this header. */
typedef struct _specialization_stats {
    uint64_t specialization_success;
    uint64_t specialization_failure;
    uint64_t loadattr_hit;
    uint64_t loadattr_deferred;
    uint64_t loadattr_miss;
    uint64_t loadattr_deopt;
} SpecializationStats;

/* Single global instance; defined in one .c file. */
extern SpecializationStats _specialization_stats;
/* Bump one named counter, e.g. STAT_INC(loadattr_hit). */
#define STAT_INC(name) _specialization_stats.name++
void _Py_PrintSpecializationStats(void);
#else
#define STAT_INC(name) ((void)0)
#endif
340
+
341
+
258
342
#ifdef __cplusplus
259
343
}
260
344
#endif
0 commit comments