+#include "pycore_optimizer.h"
+
+
+int
+_Py_uop_analyze_and_optimize(
+    PyCodeObject *co,
+    _PyUOpInstruction *trace,
+    int trace_len,
+    int curr_stacklen
+)
+{
+    /* Placeholder: no analysis passes yet, so the trace length is returned unchanged. */
+    return trace_len;
+}
diff --git a/Tools/c-analyzer/cpython/_parser.py b/Tools/c-analyzer/cpython/_parser.py
index 9bc7285e18b2fb..90334d0e79da80 100644
--- a/Tools/c-analyzer/cpython/_parser.py
+++ b/Tools/c-analyzer/cpython/_parser.py
@@ -84,6 +84,7 @@ def clean_lines(text):
Python/frozen_modules/*.h
Python/generated_cases.c.h
Python/executor_cases.c.h
+Python/abstract_interp_cases.c.h
# not actually source
Python/bytecodes.c
diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv
index c64d391bae13bd..d1ac0410619c96 100644
--- a/Tools/c-analyzer/cpython/ignored.tsv
+++ b/Tools/c-analyzer/cpython/ignored.tsv
@@ -716,3 +716,5 @@ Modules/expat/xmlrole.c - error -
## other
Modules/_io/_iomodule.c - _PyIO_Module -
Modules/_sqlite/module.c - _sqlite3module -
+Python/optimizer_analysis.c - _Py_PartitionRootNode_Type -
+Python/optimizer_analysis.c - _Py_UOpsAbstractInterpContext_Type -
diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py
index d35a16a80e8d00..ccd89c568624d2 100644
--- a/Tools/cases_generator/generate_cases.py
+++ b/Tools/cases_generator/generate_cases.py
@@ -15,6 +15,7 @@
from flags import InstructionFlags, variable_used
from instructions import (
     AnyInstruction,
+    AbstractInstruction,
     Component,
     Instruction,
     MacroInstruction,
@@ -43,6 +44,9 @@
 DEFAULT_EXECUTOR_OUTPUT = os.path.relpath(
     os.path.join(ROOT, "Python/executor_cases.c.h")
 )
+DEFAULT_ABSTRACT_INTERPRETER_OUTPUT = os.path.relpath(
+    os.path.join(ROOT, "Python/abstract_interp_cases.c.h")
+)

# Constants used instead of size for macro expansions.
# Note: 1, 2, 4 must match actual cache entry sizes.
@@ -57,6 +61,23 @@
INSTR_FMT_PREFIX = "INSTR_FMT_"
+# TODO: generate all these after updating the DSL
+SPECIALLY_HANDLED_ABSTRACT_INSTR = {
+    "LOAD_FAST",
+    "LOAD_FAST_CHECK",
+    "LOAD_FAST_AND_CLEAR",
+    "LOAD_CONST",
+    "STORE_FAST",
+    "STORE_FAST_MAYBE_NULL",
+    "COPY",
+
+    # Arithmetic
+    "_BINARY_OP_MULTIPLY_INT",
+    "_BINARY_OP_ADD_INT",
+    "_BINARY_OP_SUBTRACT_INT",
+
+}
+
arg_parser = argparse.ArgumentParser(
     description="Generate the code for the interpreter switch.",
     formatter_class=argparse.ArgumentDefaultsHelpFormatter,
@@ -91,7 +112,13 @@
     help="Write executor cases to this file",
     default=DEFAULT_EXECUTOR_OUTPUT,
 )
-
+arg_parser.add_argument(
+    "-a",
+    "--abstract-interpreter-cases",
+    type=str,
+    help="Write abstract interpreter cases to this file",
+    default=DEFAULT_ABSTRACT_INTERPRETER_OUTPUT,
+)

 class Generator(Analyzer):
     def get_stack_effect_info(
@@ -108,7 +135,7 @@ def effect_str(effects: list[StackEffect]) -> str:
         pushed: str | None
         match thing:
             case parsing.InstDef():
-                if thing.kind != "op":
+                if thing.kind != "op" or self.instrs[thing.name].is_viable_uop():
                     instr = self.instrs[thing.name]
                     popped = effect_str(instr.input_effects)
                     pushed = effect_str(instr.output_effects)
@@ -604,6 +631,35 @@ def write_executor_instructions(
             file=sys.stderr,
         )

+    def write_abstract_interpreter_instructions(
+        self, abstract_interpreter_filename: str, emit_line_directives: bool
+    ) -> None:
+        """Generate cases for the Tier 2 abstract interpreter/analyzer."""
+        with open(abstract_interpreter_filename, "w") as f:
+            self.out = Formatter(f, 8, emit_line_directives)
+            self.write_provenance_header()
+            for thing in self.everything:
+                match thing:
+                    case OverriddenInstructionPlaceHolder():
+                        pass
+                    case parsing.InstDef():
+                        instr = AbstractInstruction(self.instrs[thing.name].inst)
+                        if instr.is_viable_uop() and instr.name not in SPECIALLY_HANDLED_ABSTRACT_INSTR:
+                            self.out.emit("")
+                            with self.out.block(f"case {thing.name}:"):
+                                instr.write(self.out, tier=TIER_TWO)
+                                self.out.emit("break;")
+                    case parsing.Macro():
+                        pass
+                    case parsing.Pseudo():
+                        pass
+                    case _:
+                        typing.assert_never(thing)
+        print(
+            f"Wrote abstract interpreter cases to {abstract_interpreter_filename}",
+            file=sys.stderr,
+        )
+
     def write_overridden_instr_place_holder(
         self, place_holder: OverriddenInstructionPlaceHolder
     ) -> None:
@@ -645,6 +701,8 @@ def main():
     a.write_instructions(args.output, args.emit_line_directives)
     a.write_metadata(args.metadata, args.pymetadata)
     a.write_executor_instructions(args.executor_cases, args.emit_line_directives)
+    a.write_abstract_interpreter_instructions(args.abstract_interpreter_cases,
+                                              args.emit_line_directives)
if __name__ == "__main__":
diff --git a/Tools/cases_generator/instructions.py b/Tools/cases_generator/instructions.py
index aa94dbb07ea1c0..a505df08fa265b 100644
--- a/Tools/cases_generator/instructions.py
+++ b/Tools/cases_generator/instructions.py
@@ -248,6 +248,25 @@ def write_body(
InstructionOrCacheEffect = Instruction | parsing.CacheEffect
+# Instruction used for abstract interpretation.
+class AbstractInstruction(Instruction):
+    def __init__(self, inst: parsing.InstDef):
+        super().__init__(inst)
+
+    def write(self, out: Formatter, tier: Tiers = TIER_ONE) -> None:
+        """Write one abstract instruction, sans prologue and epilogue."""
+        stacking.write_single_instr_for_abstract_interp(self, out)
+
+    def write_body(
+        self,
+        out: Formatter,
+        dedent: int,
+        active_caches: list[ActiveCacheEffect],
+        tier: Tiers = TIER_ONE,
+    ) -> None:
+        pass
+
+
@dataclasses.dataclass
class Component:
     instr: Instruction
diff --git a/Tools/cases_generator/stacking.py b/Tools/cases_generator/stacking.py
index 9bb7f468442245..31a21e026cb49c 100644
--- a/Tools/cases_generator/stacking.py
+++ b/Tools/cases_generator/stacking.py
@@ -391,3 +391,32 @@ def write_components(
                     poke.as_stack_effect(),
                     poke.effect,
                 )
+
+
+def write_single_instr_for_abstract_interp(
+    instr: Instruction, out: Formatter
+):
+    try:
+        _write_components_for_abstract_interp(
+            [Component(instr, instr.active_caches)],
+            out,
+        )
+    except AssertionError as err:
+        raise AssertionError(f"Error writing abstract instruction {instr.name}") from err
+
+
+def _write_components_for_abstract_interp(
+    parts: list[Component],
+    out: Formatter,
+):
+    managers = get_managers(parts)
+    for mgr in managers:
+        if mgr is managers[-1]:
+            out.stack_adjust(mgr.final_offset.deep, mgr.final_offset.high)
+            # Use clone() since adjust_inverse() mutates final_offset
+            mgr.adjust_inverse(mgr.final_offset.clone())
+        # NULL out the output stack effects
+        for poke in mgr.pokes:
+            if not poke.effect.size and poke.effect.name not in mgr.instr.unmoved_names:
+                out.emit(f"PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)"
+                         f"PARTITIONNODE_NULLROOT, PEEK(-({poke.offset.as_index()})), true);")
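
For reference, a case emitted into Python/abstract_interp_cases.c.h by write_abstract_interpreter_instructions together with _write_components_for_abstract_interp would look roughly like the sketch below. This is a hand-written illustration for a uop that pops two values and pushes one; the uop name, the STACK_SHRINK adjustment, and the exact PEEK offset are assumptions, while PARTITIONNODE_OVERWRITE, _Py_PARTITIONNODE_t, and PARTITIONNODE_NULLROOT are the names introduced by this patch.

        case _BINARY_OP_ADD_UNICODE: {
            STACK_SHRINK(1);
            /* Illustrative: the result is unknown to the abstract interpreter,
               so its output slot is reset to the NULL partition root. */
            PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT,
                                    PEEK(-(-1)), true);
            break;
        }

The generated file is not meant to be edited by hand; it is regenerated alongside the other outputs, for example by running Tools/cases_generator/generate_cases.py with the new --abstract-interpreter-cases option (or via the repository's usual regeneration target).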