feat: add task 2 calculator draft
This commit is contained in:
24
task2/command_translation.py
Normal file
24
task2/command_translation.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from collections import deque
|
||||
from typing import Iterable
|
||||
from icecream import ic
|
||||
|
||||
from math_objects import Operator, Token
|
||||
|
||||
|
||||
def translate(tokens: Iterable[Token]) -> Iterable[Token]:
    """Reorder infix tokens into postfix (RPN) order.

    Non-operator tokens (operands) are emitted immediately; operators are
    held on a stack and flushed — most recently pushed first — whenever an
    operator of equal or lower precedence arrives, then once more after the
    input is exhausted.

    NOTE(review): parentheses are not handled here; a Parenthesis token
    would pass through as if it were an operand.

    Args:
        tokens: infix-ordered stream of Token objects.

    Yields:
        The same tokens, reordered into reverse Polish notation.
    """
    operator_stack: deque[Operator] = deque()
    previous_precedence = 0
    for token in tokens:
        if not isinstance(token, Operator):
            # Operands pass straight through to the output.
            yield token
            continue
        if token.precedence <= previous_precedence:
            # Equal-or-lower precedence: emit every pending operator
            # before stacking the new one.
            for item in reversed(operator_stack):
                yield item
            operator_stack.clear()
        previous_precedence = token.precedence
        operator_stack.append(token)
    # Emit any operators still pending at end of input.
    if operator_stack:
        for item in reversed(operator_stack):
            yield item
|
||||
28
task2/main.py
Normal file
28
task2/main.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""Evaluate a simple infix arithmetic expression by translating it to RPN."""

from collections import deque
from operator import add, mul, sub, truediv
from typing import Callable, Iterable

from command_translation import translate
from tokenizer import tokenize

from math_objects import Token, Integer, Operator

# Symbol -> binary function dispatch. Replaces the previous eval() on a
# string built from token values, which is unsafe and needlessly slow.
_OPERATIONS: dict[str, Callable] = {"+": add, "-": sub, "*": mul, "/": truediv}

equation: str = "1+ 2+3/3-1"  # input()

tokens: Iterable[Token] = tokenize(equation)
sorted_tokens: Iterable[Token] = translate(tokens)

# Standard RPN evaluation: push operands; an operator pops its two
# arguments (right operand popped first) and pushes the result.
token_stack: deque[Token] = deque()

for token in sorted_tokens:
    if not isinstance(token, Operator):
        token_stack.append(token)
        continue
    b, a = token_stack.pop(), token_stack.pop()
    token_stack.append(Integer(_OPERATIONS[token.value](a.value, b.value)))

print(token_stack.pop().value)
|
||||
29
task2/math_objects.py
Normal file
29
task2/math_objects.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from collections import deque
|
||||
from typing import Literal, Self
|
||||
|
||||
|
||||
class Token[T]:
|
||||
value: T
|
||||
|
||||
def __init__(self, value: T) -> None:
|
||||
self.value = value
|
||||
|
||||
class Integer(Token[int]):
    """Token wrapping a whole-number operand."""

    @classmethod
    def create_from_string(cls, string: str) -> Self:
        """Alternate constructor: parse *string* as a base-10 integer."""
        parsed = int(string)
        return cls(parsed)
|
||||
|
||||
type _OperatorType = Literal["+", "-", "*", "/"]
|
||||
|
||||
class Operator(Token[_OperatorType]):
|
||||
def __init__(self, value: Literal["+", "-", "*", "/"], precedence = 0) -> None:
|
||||
super().__init__(value)
|
||||
self.precedence: int = precedence
|
||||
|
||||
type _Parentheses = Literal["(", ")"]
|
||||
|
||||
class Parenthesis(Token[_Parentheses]):
|
||||
pass
|
||||
|
||||
# class Stack(MathObject[deque[MathObject]]):
|
||||
# pass
|
||||
39
task2/tokenizer.py
Normal file
39
task2/tokenizer.py
Normal file
@@ -0,0 +1,39 @@
|
||||
from collections import deque
|
||||
from typing import Iterable, Literal, Optional
|
||||
|
||||
from math_objects import Integer, Operator, Parenthesis, Token
|
||||
|
||||
|
||||
def tokenize(string: str) -> Iterable[Token]:
|
||||
buffer: deque[str] = deque()
|
||||
prediction: Optional[Literal["number"]] = None
|
||||
|
||||
def _take_buffer() -> str:
|
||||
string = "".join(buffer)
|
||||
buffer.clear()
|
||||
return string
|
||||
|
||||
for c in string:
|
||||
if c.isdigit():
|
||||
if prediction != "number" and buffer:
|
||||
yield Integer.create_from_string(_take_buffer())
|
||||
prediction = "number"
|
||||
buffer.append(c)
|
||||
elif c in ["+", "-", "*", "/", "(", ")"]:
|
||||
if buffer:
|
||||
yield Integer.create_from_string(_take_buffer())
|
||||
prediction = None
|
||||
match c:
|
||||
case "+" | "-":
|
||||
yield Operator(c, precedence=1)
|
||||
case "*" | "/":
|
||||
yield Operator(c, precedence=2)
|
||||
case _:
|
||||
yield Parenthesis(c) # type: ignore
|
||||
prediction = None
|
||||
elif c == " ":
|
||||
pass
|
||||
else:
|
||||
raise SyntaxError()
|
||||
if buffer:
|
||||
yield Integer.create_from_string(_take_buffer())
|
||||
Reference in New Issue
Block a user