From 7457d45e9edb0805436ac2f6ff9a7db312eede25 Mon Sep 17 00:00:00 2001
From: Thomas Wouters
Date: Fri, 1 Nov 2024 15:43:21 +0100
Subject: [PATCH] Add free-threaded specialization for COMPARE_OP, and tests
 for COMPARE_OP specialization in general.

---
 Lib/test/test_dis.py       | 26 ++++++++++++++++++++++++++
 Python/bytecodes.c         |  2 +-
 Python/generated_cases.c.h |  2 +-
 Python/specialize.c        | 12 +++++++-----
 4 files changed, 35 insertions(+), 7 deletions(-)

diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py
index a991c67fca46be..95f906dd982396 100644
--- a/Lib/test/test_dis.py
+++ b/Lib/test/test_dis.py
@@ -1335,6 +1335,32 @@ def test_call_specialize(self):
         got = self.get_disassembly(co, adaptive=True)
         self.do_disassembly_compare(got, call_quicken)
 
+    @cpython_only
+    @requires_specialization_ft
+    def test_compare_specialize(self):
+        compare_op_quicken = """\
+  0           RESUME_CHECK             0
+
+  1           LOAD_NAME                0 (a)
+              LOAD_NAME                1 (b)
+              %s
+              RETURN_VALUE
+"""
+        co_int = compile('a == b', "", "eval")
+        self.code_quicken(lambda: exec(co_int, {}, {'a': 1, 'b': 2}))
+        got = self.get_disassembly(co_int, adaptive=True)
+        self.do_disassembly_compare(got, compare_op_quicken % "COMPARE_OP_INT          72 (==)")
+
+        co_float = compile('a == b', "", "eval")
+        self.code_quicken(lambda: exec(co_float, {}, {'a': 1.0, 'b': 2.0}))
+        got = self.get_disassembly(co_float, adaptive=True)
+        self.do_disassembly_compare(got, compare_op_quicken % "COMPARE_OP_FLOAT        72 (==)")
+
+        co_unicode = compile('a == b', "", "eval")
+        self.code_quicken(lambda: exec(co_unicode, {}, {'a': 'a', 'b': 'b'}))
+        got = self.get_disassembly(co_unicode, adaptive=True)
+        self.do_disassembly_compare(got, compare_op_quicken % "COMPARE_OP_STR          72 (==)")
+
     @cpython_only
     @requires_specialization
     def test_loop_quicken(self):
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 2c78cb9931733d..8dcc2cb038832b 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -2381,7 +2381,7 @@ dummy_func(
         };
 
         specializing op(_SPECIALIZE_COMPARE_OP, (counter/1, left, right -- left, right)) {
-            #if ENABLE_SPECIALIZATION
+            #if ENABLE_SPECIALIZATION_FT
             if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                 next_instr = this_instr;
                 _Py_Specialize_CompareOp(left, right, next_instr, oparg);
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index eff246f1997276..b4e06faa440ffe 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -3216,7 +3216,7 @@
                 left = stack_pointer[-2];
                 uint16_t counter = read_u16(&this_instr[1].cache);
                 (void)counter;
-                #if ENABLE_SPECIALIZATION
+                #if ENABLE_SPECIALIZATION_FT
                 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                     next_instr = this_instr;
                     _PyFrame_SetStackPointer(frame, stack_pointer);
diff --git a/Python/specialize.c b/Python/specialize.c
index 86cb997ca2ced3..c4d5a7dc476b13 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -2382,8 +2382,9 @@ _Py_Specialize_CompareOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *i
 {
     PyObject *lhs = PyStackRef_AsPyObjectBorrow(lhs_st);
     PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st);
+    uint8_t specialized_op;
 
-    assert(ENABLE_SPECIALIZATION);
+    assert(ENABLE_SPECIALIZATION_FT);
     assert(_PyOpcode_Caches[COMPARE_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP);
     // All of these specializations compute boolean values, so they're all valid
     // regardless of the fifth-lowest oparg bit.
@@ -2393,12 +2394,12 @@ _Py_Specialize_CompareOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *i
         goto failure;
     }
     if (PyFloat_CheckExact(lhs)) {
-        instr->op.code = COMPARE_OP_FLOAT;
+        specialized_op = COMPARE_OP_FLOAT;
         goto success;
     }
     if (PyLong_CheckExact(lhs)) {
         if (_PyLong_IsCompact((PyLongObject *)lhs) && _PyLong_IsCompact((PyLongObject *)rhs)) {
-            instr->op.code = COMPARE_OP_INT;
+            specialized_op = COMPARE_OP_INT;
             goto success;
         }
         else {
@@ -2413,18 +2414,19 @@ _Py_Specialize_CompareOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *i
             goto failure;
         }
         else {
-            instr->op.code = COMPARE_OP_STR;
+            specialized_op = COMPARE_OP_STR;
             goto success;
         }
     }
     SPECIALIZATION_FAIL(COMPARE_OP, compare_op_fail_kind(lhs, rhs));
 failure:
     STAT_INC(COMPARE_OP, failure);
-    instr->op.code = COMPARE_OP;
+    SET_OPCODE_OR_RETURN(instr, COMPARE_OP);
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
     STAT_INC(COMPARE_OP, success);
+    SET_OPCODE_OR_RETURN(instr, specialized_op);
     cache->counter = adaptive_counter_cooldown();
 }
 
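
Reviewer note on the Python/specialize.c hunks above: in the default (GIL) build it was safe to write instr->op.code directly at each specialization site, but in the free-threaded build two threads can race to specialize the same COMPARE_OP instruction. The patch therefore computes the chosen opcode into the local specialized_op and publishes it exactly once, at the failure/success labels, via SET_OPCODE_OR_RETURN. The stand-alone C11 sketch below illustrates that publish-once idea under the assumption that the macro refuses to overwrite an instruction another thread has already changed; every name in the sketch (toy_instr, toy_set_opcode_or_bail, TOY_*) is hypothetical and is not CPython API.

    /* Illustrative sketch only, not CPython's SET_OPCODE_OR_RETURN: pick the
     * specialized opcode first, then publish it with one compare-and-swap so
     * a concurrent specializer cannot interleave with us. */
    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct {
        _Atomic uint8_t opcode;            /* stand-in for instr->op.code */
    } toy_instr;

    enum { TOY_COMPARE_OP = 10, TOY_COMPARE_OP_INT = 11 };

    /* Write new_opcode only if the instruction still holds `expected`;
       return 1 on success, 0 if another thread won the race. */
    static int
    toy_set_opcode_or_bail(toy_instr *instr, uint8_t expected, uint8_t new_opcode)
    {
        return atomic_compare_exchange_strong(&instr->opcode, &expected, new_opcode);
    }

    int
    main(void)
    {
        toy_instr instr = { .opcode = TOY_COMPARE_OP };
        uint8_t specialized_op = TOY_COMPARE_OP_INT;   /* decided earlier, as in the patch */

        if (toy_set_opcode_or_bail(&instr, TOY_COMPARE_OP, specialized_op)) {
            printf("specialized to %d\n", (int)atomic_load(&instr.opcode));
        }
        else {
            printf("lost the race; leaving the instruction alone\n");
        }
        return 0;
    }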