diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py
index 57991b5b6b859c..0ae1c0047096f2 100644
--- a/Lib/test/test_generated_cases.py
+++ b/Lib/test/test_generated_cases.py
@@ -29,7 +29,7 @@ def skip_if_different_mount_drives():
 
 test_tools.skip_if_missing("cases_generator")
 with test_tools.imports_under_tool("cases_generator"):
-    from analyzer import StackItem
+    from analyzer import StackItem, analyze_forest
     from cwriter import CWriter
     import parser
     from stack import Local, Stack
@@ -2638,5 +2638,74 @@ def test_replace_opocode_uop_reject_array_effects(self):
                 "Pure evaluation cannot take array-like inputs"):
             self.run_cases_test(input, input2, output)
 
+class TestAnalyzer(unittest.TestCase):
+    """Tests for analyzer.add_macro() recording-uop placement rules (gh-148285)."""
+
+    def _parse_and_analyze(self, src: str) -> None:
+        """Parse a raw DSL fragment and run analyze_forest() on it."""
+        nodes = parse_src(src)
+        analyze_forest(nodes)
+
+    # ---- shared DSL fragments -----------------------------------------------
+
+    _SPECIALIZE_OP = """\
+specializing op(_SPECIALIZE_DUMMY, (counter/1, value -- value)) {
+}
+"""
+    _RECORD_OP = """\
+op(_RECORD_DUMMY, (value -- value)) {
+    RECORD_VALUE(PyStackRef_AsPyObjectBorrow(value));
+}
+"""
+    _WORKER_OP = """\
+op(_WORKER_DUMMY, (value -- res)) {
+    res = value;
+}
+"""
+
+    # ---- test cases ---------------------------------------------------------
+
+    def test_recording_uop_after_specializing(self):
+        """Valid: recording uop directly follows a specializing uop."""
+        src = (
+            self._SPECIALIZE_OP
+            + self._RECORD_OP
+            + self._WORKER_OP
+            + "macro(VALID_DIRECT) = _SPECIALIZE_DUMMY + _RECORD_DUMMY + _WORKER_DUMMY;\n"
+        )
+        # Must not raise.
+        self._parse_and_analyze(src)
+
+    def test_recording_uop_after_specializing_with_cache(self):
+        """Valid: recording uop follows a specializing uop with a cache effect between them.
+
+        CacheEffect entries are transparent — they must not close the gate that
+        allows a recording uop to follow a specializing uop.
+        """
+        src = (
+            self._SPECIALIZE_OP
+            + self._RECORD_OP
+            + self._WORKER_OP
+            + "macro(VALID_CACHE) = _SPECIALIZE_DUMMY + unused/1 + _RECORD_DUMMY + _WORKER_DUMMY;\n"
+        )
+        # Must not raise.
+        self._parse_and_analyze(src)
+
+    def test_recording_uop_after_non_specializing_raises(self):
+        """Invalid: recording uop after a plain (non-specializing) uop must be rejected."""
+        src = (
+            self._SPECIALIZE_OP
+            + self._RECORD_OP
+            + self._WORKER_OP
+            + "macro(INVALID) = _WORKER_DUMMY + _RECORD_DUMMY;\n"
+        )
+        with self.assertRaisesRegex(
+            SyntaxError,
+            r"Recording uop _RECORD_DUMMY must be first in macro "
+            r"or immediately follow a specializing uop",
+        ):
+            self._parse_and_analyze(src)
+
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c
index c12a4f4131bc7e..6e5af4793419f2 100644
--- a/Python/optimizer_bytecodes.c
+++ b/Python/optimizer_bytecodes.c
@@ -226,6 +226,10 @@ dummy_func(void) {
             }
             else {
                 sym_set_const(owner, type);
+                if ((((PyTypeObject *)type)->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) == 0) {
+                    PyType_Watch(TYPE_WATCHER_ID, type);
+                    _Py_BloomFilter_Add(dependencies, type);
+                }
             }
         }
     }
@@ -1213,6 +1217,13 @@
         (void)framesize;
     }
 
+    op(_CHECK_IS_NOT_PY_CALLABLE, (callable, unused, unused[oparg] -- callable, unused, unused[oparg])) {
+        PyTypeObject *type = sym_get_type(callable);
+        if (type && type != &PyFunction_Type && type != &PyMethod_Type) {
+            ADD_OP(_NOP, 0, 0);
+        }
+    }
+
     op(_PUSH_FRAME, (new_frame -- )) {
         SYNC_SP();
         if (!CURRENT_FRAME_IS_INIT_SHIM()) {
diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py
index 6ba9c43ef1f0c3..7cba8ae2239903 100644
--- a/Tools/cases_generator/analyzer.py
+++ b/Tools/cases_generator/analyzer.py
@@ -1132,7 +1132,11 @@ def add_macro(
     macro: parser.Macro, instructions: dict[str, Instruction], uops: dict[str, Uop]
 ) -> None:
     parts: list[Part] = []
-    first = True
+    # Tracks the last "plain" uop (neither specializing nor recording).
+    # CacheEffect, flush, specializing, and recording uops all leave it
+    # unchanged, so prev_uop being non-None is sufficient to mean
+    # "a disqualifying uop has been seen before this recording uop".
+    prev_uop: Uop | None = None
     for part in macro.uops:
         match part:
             case parser.OpName():
@@ -1144,12 +1148,16 @@
                     f"No Uop named {part.name}", macro.tokens[0]
                 )
                 uop = uops[part.name]
-                if uop.properties.records_value and not first:
+                if uop.properties.records_value and prev_uop is not None:
                     raise analysis_error(
-                        f"Recording uop {part.name} must be first in macro",
+                        f"Recording uop {part.name} must be first in macro "
+                        f"or immediately follow a specializing uop",
                         macro.tokens[0])
                 parts.append(uop)
-                first = False
+                # Only plain worker uops set prev_uop; specializing and
+                # recording uops are excluded so the check above stays simple.
+                if not uop.properties.records_value and "specializing" not in uop.annotations:
+                    prev_uop = uop
             case parser.CacheEffect():
                 parts.append(Skip(part.size))
             case _:
@@ -1158,6 +1166,7 @@
     add_instruction(macro.first_token, macro.name, parts, instructions)
 
 
+
 def add_family(
     pfamily: parser.Family,
     instructions: dict[str, Instruction],