path: root/calculator.py
from dataclasses import dataclass, make_dataclass

# A Number token carries the literal's numeric value; every other token type
# below is an empty marker class.
@dataclass
class Number:
    _: float

ParenClose = make_dataclass('ParenClose', [])
ParenOpen = make_dataclass('ParenOpen', [])
AddOp = make_dataclass('AddOp', [])
SubOp = make_dataclass('SubOp', [])
MulOp = make_dataclass('MulOp', [])
DivOp = make_dataclass('DivOp', [])
PowOp = make_dataclass('PowOp', [])
NegOp = make_dataclass('NegOp', [])
ModOp = make_dataclass('ModOp', [])
FacOp = make_dataclass('FacOp', [])
AvgOp = make_dataclass('AvgOp', [])
MaxOp = make_dataclass('MaxOp', [])
MinOp = make_dataclass('MinOp', [])

Token = Number | AddOp | SubOp | MulOp | DivOp | PowOp | NegOp | ModOp | FacOp | AvgOp | MaxOp | MinOp | ParenClose | ParenOpen

Characters = {
    '(': ParenOpen,
    ')': ParenClose,
    '+': AddOp,
    '-': SubOp,
    '*': MulOp,
    '/': DivOp,
    '^': PowOp,
    '~': NegOp,
    '%': ModOp,
    '!': FacOp,
    '@': AvgOp,
    '$': MaxOp,
    '&': MinOp
}
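# Lexing leans on Python's own tokenize module: digits arrive as NUMBER tokens,
# and the operator symbols above arrive as OP or, for characters Python itself
# does not use (e.g. '$'), as ERRORTOKEN, depending on the Python version.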

from tokenize import tokenize
from io import BytesIO
import token

def Tokenize(s: str) -> list[Token]:
    g = tokenize(BytesIO(s.encode('utf-8')).readline)
    r = []
    for n, v, _, _, _ in g:
        match (n, v):
            case (token.NUMBER, x):
                r.append(Number(float(x)))
            # Accept both OP and ERRORTOKEN and look the symbol up in Characters.
            case (token.OP | token.ERRORTOKEN, x) if x in Characters:
                r.append(Characters[x]())
    return r
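
# For example, Tokenize('1+2*3') yields
# [Number(1.0), AddOp(), Number(2.0), MulOp(), Number(3.0)].

# The parser is a small recursive-descent parser over that token list:
#   Expression := Term (('+' | '-') Term)*
#   Term       := Factor (('*' | '/' | '^' | '%' | '@' | '$' | '&') Factor)*
#   Factor     := '~' Factor | NUMBER | '(' Expression ')', optionally followed by '!'
# Each function returns the value parsed so far plus the remaining tokens.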

def Expression(e: list[Token]) -> tuple[float, list[Token]]:
    # Lowest precedence level: a Term followed by any number of '+'/'-' Terms.
    r, e = Term(e)
    while len(e) > 0:
        match e.pop(0):
            case AddOp():
                x, e = Term(e)
                r += x
            case SubOp():
                x, e = Term(e)
                r -= x
            case x:
                # Not a '+' or '-': push the token back for the caller and stop.
                e.insert(0, x)
                break
    return r, e

def Term(e: list[Token]) -> tuple[float, list[Token]]:
    # Every other binary operator lives here, sharing a single precedence level
    # and associating left to right ('^' included).
    r, e = Factor(e)
    while len(e) > 0:
        match e.pop(0):
            case ModOp():
                x, e = Factor(e)
                r %= x
            case AvgOp():
                x, e = Factor(e)
                r = (x + r) / 2
            case MinOp():
                x, e = Factor(e)
                r = min(x, r)
            case MaxOp():
                x, e = Factor(e)
                r = max(x, r)
            case MulOp():
                x, e = Factor(e)
                r *= x
            case DivOp():
                x, e = Factor(e)
                r /= x
            case PowOp():
                x, e = Factor(e)
                r **= x
            case x:
                e.insert(0, x)
                break
    return r, e

def Factor(e: list[Token]) -> tuple[float, list[Token]]:
    # Highest precedence: unary '~', a number literal, or a parenthesised
    # Expression, optionally followed by a postfix '!' (factorial).
    assert len(e) > 0
    match e.pop(0):
        case NegOp():
            r, e = Factor(e)
            r = -r
        case Number(n):
            r = n
        case ParenOpen():
            r, e = Expression(e)
            assert isinstance(e.pop(0), ParenClose)
        case t:
            # Any other token here means the expression is malformed.
            raise SyntaxError(f'unexpected token: {t}')
    
    # A postfix '!' applies factorial to the Factor that was just parsed.
    if len(e) > 0 and isinstance(e[0], FacOp):
        from math import factorial
        e.pop(0)
        r = float(factorial(int(r)))
    return r, e

# where is my compose
Evaluate = lambda x: Expression(Tokenize(x))[0]
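
# A minimal usage sketch (illustrative only, not part of the parser itself):
# evaluate a few sample expressions when the file is run directly.
if __name__ == '__main__':
    for expr in ('1+2*3', '(1+2)*3', '2^3', '3@5'):
        print(expr, '=', Evaluate(expr))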