Skip to content

Commit

Permalink
Address code review
Browse files Browse the repository at this point in the history
  • Loading branch information
corona10 committed Aug 18, 2023
1 parent fd4743a commit 56a9fdc
Show file tree
Hide file tree
Showing 11 changed files with 273 additions and 170 deletions.
10 changes: 6 additions & 4 deletions .github/workflows/mypy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,8 +35,8 @@ jobs:
with:
python-version: "3.x"
cache: pip
cache-dependency-path: Tools/clinic/requirements-dev.txt
- run: pip install -r Tools/clinic/requirements-dev.txt
cache-dependency-path: Tools/requirements-dev.txt
- run: pip install -r Tools/requirements-dev.txt
- run: mypy --config-file Tools/clinic/mypy.ini

mypy-cases-generator:
Expand All @@ -49,6 +49,8 @@ jobs:
with:
python-version: "3.x"
cache: pip
cache-dependency-path: Tools/cases_generator/requirements-dev.txt
- run: pip install -r Tools/cases_generator/requirements-dev.txt
cache-dependency-path: Tools/requirements-dev.txt
- name: Lint with black
run: black Tools/cases_generator
- run: pip install -r Tools/requirements-dev.txt
- run: mypy --config-file Tools/cases_generator/mypy.ini
4 changes: 4 additions & 0 deletions Tools/cases_generator/analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,8 @@ def parse_file(self, filename: str, instrs_idx: dict[str, int]) -> None:
case parsing.Pseudo(name):
self.pseudos[name] = thing
self.everything.append(thing)
case _:
typing.assert_never(thing)
if not psr.eof():
raise psr.make_syntax_error(f"Extra stuff at the end of {filename}")

Expand Down Expand Up @@ -402,4 +404,6 @@ def check_macro_components(
components.append(self.instrs[name])
case parsing.CacheEffect():
components.append(uop)
case _:
typing.assert_never(uop)
return components
12 changes: 6 additions & 6 deletions Tools/cases_generator/flags.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def __post_init__(self) -> None:
self.bitmask = {name: (1 << i) for i, name in enumerate(self.names())}

@staticmethod
def fromInstruction(instr: parsing.Node) -> 'InstructionFlags':
def fromInstruction(instr: parsing.Node) -> "InstructionFlags":

has_free = (
variable_used(instr, "PyCell_New")
Expand All @@ -41,15 +41,15 @@ def fromInstruction(instr: parsing.Node) -> 'InstructionFlags':
)

@staticmethod
def newEmpty() -> 'InstructionFlags':
def newEmpty() -> "InstructionFlags":
return InstructionFlags(False, False, False, False, False, False)

def add(self, other: "InstructionFlags") -> None:
for name, value in dataclasses.asdict(other).items():
if value:
setattr(self, name, value)

def names(self, value: bool|None = None) -> list[str]:
def names(self, value: bool | None = None) -> list[str]:
if value is None:
return list(dataclasses.asdict(self).keys())
return [n for n, v in dataclasses.asdict(self).items() if v == value]
Expand Down Expand Up @@ -90,9 +90,9 @@ def variable_used_unspecialized(node: parsing.Node, name: str) -> bool:
text = "".join(token.text.split())
# TODO: Handle nested #if
if text == "#if":
if (
i + 1 < len(node.tokens)
and node.tokens[i + 1].text in ("ENABLE_SPECIALIZATION", "TIER_ONE")
if i + 1 < len(node.tokens) and node.tokens[i + 1].text in (
"ENABLE_SPECIALIZATION",
"TIER_ONE",
):
skipping = True
elif text in ("#else", "#endif"):
Expand Down
5 changes: 3 additions & 2 deletions Tools/cases_generator/formatting.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import collections
import contextlib
import re
import typing
Expand Down Expand Up @@ -58,13 +59,13 @@ def reset_lineno(self) -> None:
self.set_lineno(self.lineno + 1, self.filename)

@contextlib.contextmanager
def indent(self) -> typing.Iterator[None]:
def indent(self) -> collections.abc.Iterator[None]:
self.prefix += " "
yield
self.prefix = self.prefix[:-4]

@contextlib.contextmanager
def block(self, head: str, tail: str = "") -> typing.Iterator[None]:
def block(self, head: str, tail: str = "") -> collections.abc.Iterator[None]:
if head:
self.emit(head + " {")
else:
Expand Down
73 changes: 42 additions & 31 deletions Tools/cases_generator/generate_cases.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,12 +79,10 @@
"STORE_FAST",
"STORE_FAST_MAYBE_NULL",
"COPY",

# Arithmetic
"_BINARY_OP_MULTIPLY_INT",
"_BINARY_OP_ADD_INT",
"_BINARY_OP_SUBTRACT_INT",

}

arg_parser = argparse.ArgumentParser(
Expand Down Expand Up @@ -143,6 +141,7 @@
default=DEFAULT_ABSTRACT_INTERPRETER_OUTPUT,
)


class Generator(Analyzer):
def get_stack_effect_info(
self, thing: parsing.InstDef | parsing.Macro | parsing.Pseudo
Expand Down Expand Up @@ -183,10 +182,14 @@ def effect_str(effects: list[StackEffect]) -> str:
target_popped = effect_str(target_instr.input_effects)
target_pushed = effect_str(target_instr.output_effects)
popped, pushed = target_popped, target_pushed
case _:
typing.assert_never(thing)
return instr, popped, pushed

@contextlib.contextmanager
def metadata_item(self, signature: str, open: str, close: str) -> typing.Iterator[None]:
def metadata_item(
self, signature: str, open: str, close: str
) -> typing.Iterator[None]:
self.out.emit("")
self.out.emit(f"extern {signature};")
self.out.emit("#ifdef NEED_OPCODE_METADATA")
Expand All @@ -211,7 +214,9 @@ def write_function(
) -> None:

with self.metadata_item(
f"int _PyOpcode_num_{direction}(int opcode, int oparg, bool jump)", "", ""
f"int _PyOpcode_num_{direction}(int opcode, int oparg, bool jump)",
"",
"",
):
with self.out.block("switch(opcode)"):
for instr, effect in data:
Expand Down Expand Up @@ -249,10 +254,11 @@ def assign_opcode_ids(self) -> None:

for instr in itertools.chain(
[instr for instr in self.instrs.values() if instr.kind != "op"],
self.macro_instrs.values()):
self.macro_instrs.values(),
):
assert isinstance(instr, (Instruction, MacroInstruction, PseudoInstruction))
name = instr.name
if name.startswith('INSTRUMENTED_'):
if name.startswith("INSTRUMENTED_"):
instrumented_ops.append(name)
else:
ops.append((instr.instr_flags.HAS_ARG_FLAG, name))
Expand All @@ -261,13 +267,13 @@ def assign_opcode_ids(self) -> None:
# rather than bytecodes.c, so we need to add it explicitly
# here (at least until we add something to bytecodes.c to
# declare external instructions).
instrumented_ops.append('INSTRUMENTED_LINE')
instrumented_ops.append("INSTRUMENTED_LINE")

# assert lists are unique
assert len(set(ops)) == len(ops)
assert len(set(instrumented_ops)) == len(instrumented_ops)

opname: list[str|None] = [None] * 512
opname: list[str | None] = [None] * 512
opmap: dict[str, int] = {}
markers: dict[str, int] = {}

Expand All @@ -278,16 +284,15 @@ def map_op(op: int, name: str) -> None:
opname[op] = name
opmap[name] = op


# 0 is reserved for cache entries. This helps debugging.
map_op(0, 'CACHE')
map_op(0, "CACHE")

# 17 is reserved as it is the initial value for the specializing counter.
# This helps catch cases where we attempt to execute a cache.
map_op(17, 'RESERVED')
map_op(17, "RESERVED")

# 166 is RESUME - it is hard coded as such in Tools/build/deepfreeze.py
map_op(166, 'RESUME')
map_op(166, "RESUME")

next_opcode = 1

Expand All @@ -299,13 +304,13 @@ def map_op(op: int, name: str) -> None:
assert next_opcode < 255
map_op(next_opcode, name)

if has_arg and 'HAVE_ARGUMENT' not in markers:
markers['HAVE_ARGUMENT'] = next_opcode
if has_arg and "HAVE_ARGUMENT" not in markers:
markers["HAVE_ARGUMENT"] = next_opcode

# Instrumented opcodes are at the end of the valid range
min_instrumented = 254 - (len(instrumented_ops) - 1)
assert next_opcode <= min_instrumented
markers['MIN_INSTRUMENTED_OPCODE'] = min_instrumented
markers["MIN_INSTRUMENTED_OPCODE"] = min_instrumented
for i, op in enumerate(instrumented_ops):
map_op(min_instrumented + i, op)

Expand All @@ -317,7 +322,9 @@ def map_op(op: int, name: str) -> None:
self.opmap = opmap
self.markers = markers

def write_opcode_ids(self, opcode_ids_h_filename: str, opcode_targets_filename: str) -> None:
def write_opcode_ids(
self, opcode_ids_h_filename: str, opcode_targets_filename: str
) -> None:
"""Write header file that defined the opcode IDs"""

with open(opcode_ids_h_filename, "w") as f:
Expand All @@ -330,7 +337,7 @@ def write_opcode_ids(self, opcode_ids_h_filename: str, opcode_targets_filename:
self.out.emit("#ifndef Py_OPCODE_IDS_H")
self.out.emit("#define Py_OPCODE_IDS_H")
self.out.emit("#ifdef __cplusplus")
self.out.emit("extern \"C\" {")
self.out.emit('extern "C" {')
self.out.emit("#endif")
self.out.emit("")
self.out.emit("/* Instruction opcodes for compiled code */")
Expand Down Expand Up @@ -363,7 +370,6 @@ def define(name: str, opcode: int) -> None:
targets[op] = f"TARGET_{name}"
f.write(",\n".join([f" &&{s}" for s in targets]))


def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> None:
"""Write instruction metadata to output file."""

Expand Down Expand Up @@ -458,7 +464,7 @@ def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> No
"const struct opcode_metadata "
"_PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE]",
"=",
";"
";",
):
# Write metadata for each instruction
for thing in self.everything:
Expand All @@ -471,13 +477,15 @@ def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> No
case parsing.Macro():
self.write_metadata_for_macro(self.macro_instrs[thing.name])
case parsing.Pseudo():
self.write_metadata_for_pseudo(self.pseudo_instrs[thing.name])
self.write_metadata_for_pseudo(
self.pseudo_instrs[thing.name]
)

with self.metadata_item(
"const struct opcode_macro_expansion "
"_PyOpcode_macro_expansion[OPCODE_MACRO_EXPANSION_SIZE]",
"=",
";"
";",
):
# Write macro expansion for each non-pseudo instruction
for thing in self.everything:
Expand Down Expand Up @@ -514,7 +522,9 @@ def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> No
self.write_uop_items(lambda name, counter: f'[{name}] = "{name}",')

with self.metadata_item(
f"const char *const _PyOpcode_OpName[{1 + max(self.opmap.values())}]", "=", ";"
f"const char *const _PyOpcode_OpName[{1 + max(self.opmap.values())}]",
"=",
";",
):
for name in self.opmap:
self.out.emit(f'[{name}] = "{name}",')
Expand All @@ -527,11 +537,9 @@ def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> No
for m in family.members:
deoptcodes[m] = name
# special case:
deoptcodes['BINARY_OP_INPLACE_ADD_UNICODE'] = 'BINARY_OP'
deoptcodes["BINARY_OP_INPLACE_ADD_UNICODE"] = "BINARY_OP"

with self.metadata_item(
f"const uint8_t _PyOpcode_Deopt[256]", "=", ";"
):
with self.metadata_item(f"const uint8_t _PyOpcode_Deopt[256]", "=", ";"):
for opt, deopt in sorted(deoptcodes.items()):
self.out.emit(f"[{opt}] = {deopt},")

Expand Down Expand Up @@ -589,10 +597,9 @@ def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> No
if name not in specialized_ops:
self.out.emit(f"'{name}': {op},")

for name in ['MIN_INSTRUMENTED_OPCODE', 'HAVE_ARGUMENT']:
for name in ["MIN_INSTRUMENTED_OPCODE", "HAVE_ARGUMENT"]:
self.out.emit(f"{name} = {self.markers[name]}")


def write_pseudo_instrs(self) -> None:
"""Write the IS_PSEUDO_INSTR macro"""
self.out.emit("\n\n#define IS_PSEUDO_INSTR(OP) ( \\")
Expand Down Expand Up @@ -815,7 +822,10 @@ def write_abstract_interpreter_instructions(
pass
case parsing.InstDef():
instr = AbstractInstruction(self.instrs[thing.name].inst)
if instr.is_viable_uop() and instr.name not in SPECIALLY_HANDLED_ABSTRACT_INSTR:
if (
instr.is_viable_uop()
and instr.name not in SPECIALLY_HANDLED_ABSTRACT_INSTR
):
self.out.emit("")
with self.out.block(f"case {thing.name}:"):
instr.write(self.out, tier=TIER_TWO)
Expand Down Expand Up @@ -878,8 +888,9 @@ def main() -> None:
a.write_opcode_ids(args.opcode_ids_h, args.opcode_targets_h)
a.write_metadata(args.metadata, args.pymetadata)
a.write_executor_instructions(args.executor_cases, args.emit_line_directives)
a.write_abstract_interpreter_instructions(args.abstract_interpreter_cases,
args.emit_line_directives)
a.write_abstract_interpreter_instructions(
args.abstract_interpreter_cases, args.emit_line_directives
)


if __name__ == "__main__":
Expand Down
Loading

0 comments on commit 56a9fdc

Please sign in to comment.