Skip to content

Commit

Permalink
fix cat
Browse files Browse the repository at this point in the history
  • Loading branch information
ruskaof committed Dec 18, 2023
1 parent 7a9a5fb commit 6fd2f83
Show file tree
Hide file tree
Showing 6 changed files with 243 additions and 817 deletions.
26 changes: 0 additions & 26 deletions computer_simulator/translator/__init__.py
Original file line number Diff line number Diff line change
@@ -1,26 +0,0 @@
from abc import ABC
from dataclasses import dataclass
from enum import Enum, auto


@dataclass
class Token(ABC):
    """A lexical token produced by the tokenizer.

    Attributes:
        token_type: which kind of token this is (see :class:`Token.Type`).
        value: the raw source text the token was built from.
    """

    class Type(Enum):
        """Kinds of tokens the translator understands.

        Members intentionally carry no ``: int`` annotations: an enum
        member's type is ``Token.Type`` itself, not ``int``, so the old
        annotations were misleading.  Member order is preserved so the
        ``auto()`` values stay identical.
        """

        IF = auto()
        OPEN_BRACKET = auto()
        CLOSE_BRACKET = auto()
        BINOP = auto()
        BOOLEAN = auto()
        INT = auto()
        STRING = auto()
        SETQ = auto()
        IDENTIFIER = auto()
        DEFUN = auto()
        PRINT_CHAR = auto()
        PRINT_STRING = auto()
        PROGN = auto()
        READ_STRING = auto()
        WHILE = auto()

    # Fields of the dataclass.
    token_type: Type
    value: str
21 changes: 18 additions & 3 deletions computer_simulator/translator/expression_translator.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
from typing import Callable, cast

from computer_simulator.isa import Arg, ArgType, Instruction, Opcode
from computer_simulator.translator import Token
from computer_simulator.translator.errors import InvalidSymbolsError
from computer_simulator.translator.tokenizer import Token

EXPECTED_IDENTIFIER = "Expected identifier"
STATIC_MEMORY_SIZE = 512
Expand Down Expand Up @@ -456,13 +456,27 @@ def handle_token_defun(tokens: list[Token], idx: int, result: Program, started_w
return get_expr_end_idx(tokens, body_end_idx, started_with_open_bracket)


def handle_token_open_bracket(tokens: list[Token], idx: int, result: Program, started_with_open_bracket: bool) -> int:
    """Translate a parenthesized sub-expression.

    The sub-expression starts right after the open bracket at ``idx``; once
    it has been translated, the very next token must be a close bracket.

    Raises:
        InvalidSymbolsError: if the sub-expression is not followed by ``)``.
    """
    after_expr_idx = translate_expression(tokens, idx + 1, result)
    closing = tokens[after_expr_idx]
    if closing.token_type is not Token.Type.CLOSE_BRACKET:
        raise InvalidSymbolsError(got=closing, expected="close bracket")
    return get_expr_end_idx(tokens, after_expr_idx + 1, started_with_open_bracket)


def handle_token_read_char(tokens: list[Token], idx: int, result: Program, started_with_open_bracket: bool) -> int:
    """Translate ``(read_char <identifier>)``: read one char into a variable.

    Emits an ``IN`` instruction followed by a ``ST`` to the variable's
    stack-offset slot.  Assumes the token at ``idx + 1`` names the target
    variable — TODO confirm an IDENTIFIER check is not needed here.
    """
    varname = tokens[idx + 1].value

    # First use of this variable: give it a slot on the stack.
    if result.get_var_sp_offset(varname) is None:
        result.push_var_to_stack(varname)

    result.memory.append(Instruction(Opcode.IN, None))
    offset = cast(int, result.get_var_sp_offset(varname))
    result.memory.append(Instruction(Opcode.ST, Arg(offset, ArgType.STACK_OFFSET)))

    return get_expr_end_idx(tokens, idx + 2, started_with_open_bracket)


TOKEN_HANDLERS: dict[Token.Type, Callable[[list[Token], int, Program, bool], int]] = {
Token.Type.INT: handle_token_int,
Token.Type.BINOP: handle_token_binop,
Expand All @@ -476,7 +490,8 @@ def handle_open_bracket(tokens: list[Token], idx: int, result: Program, started_
Token.Type.READ_STRING: handle_token_read_string,
Token.Type.WHILE: handle_token_while,
Token.Type.DEFUN: handle_token_defun,
Token.Type.OPEN_BRACKET: handle_open_bracket,
Token.Type.OPEN_BRACKET: handle_token_open_bracket,
Token.Type.READ_CHAR: handle_token_read_char,
}


Expand Down
28 changes: 27 additions & 1 deletion computer_simulator/translator/tokenizer.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,38 @@
from __future__ import annotations

from dataclasses import dataclass
from enum import auto, Enum
from typing import Callable

from computer_simulator.translator import Token
from computer_simulator.translator.errors import InvalidSymbolsError

IDENTIFIER_NON_ALPHA_CHARS = {"_"}


@dataclass
class Token:
    """A lexical token produced by the tokenizer.

    Attributes:
        token_type: which kind of token this is (see :class:`Token.Type`).
        value: the raw source text the token was built from.
    """

    class Type(Enum):
        """Kinds of tokens the translator understands.

        Members intentionally carry no ``: int`` annotations: an enum
        member's type is ``Token.Type`` itself, not ``int``, so the old
        annotations were misleading.  Member order is preserved so the
        ``auto()`` values stay identical.
        """

        IF = auto()
        OPEN_BRACKET = auto()
        CLOSE_BRACKET = auto()
        BINOP = auto()
        BOOLEAN = auto()
        INT = auto()
        STRING = auto()
        SETQ = auto()
        IDENTIFIER = auto()
        DEFUN = auto()
        PRINT_CHAR = auto()
        PRINT_STRING = auto()
        PROGN = auto()
        READ_STRING = auto()
        WHILE = auto()
        READ_CHAR = auto()

    # Fields of the dataclass.
    token_type: Type
    value: str


def process_whitespace(idx: int, chars: str) -> int:
if idx >= len(chars):
return idx
Expand Down Expand Up @@ -118,6 +143,7 @@ def process_keyword(tokens: list, idx: int, chars: str, token_type: Token.Type,
lambda tokens, idx, chars: process_keyword(tokens, idx, chars, Token.Type.PROGN, "progn"),
lambda tokens, idx, chars: process_keyword(tokens, idx, chars, Token.Type.READ_STRING, "read_string"),
lambda tokens, idx, chars: process_keyword(tokens, idx, chars, Token.Type.WHILE, "while"),
lambda tokens, idx, chars: process_keyword(tokens, idx, chars, Token.Type.READ_CHAR, "read_char"),
process_number_literal,
process_string_literal,
process_identifier,
Expand Down
3 changes: 1 addition & 2 deletions computer_simulator/translator/translator.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,8 @@
from pathlib import Path

from computer_simulator.isa import Instruction
from computer_simulator.translator import Token
from computer_simulator.translator.expression_translator import Program, translate_program
from computer_simulator.translator.tokenizer import tokenize
from computer_simulator.translator.tokenizer import tokenize, Token


def run_translator(tokens: list[Token]) -> Program:
Expand Down
Loading

0 comments on commit 6fd2f83

Please sign in to comment.