summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--calculator.py116
1 files changed, 116 insertions, 0 deletions
diff --git a/calculator.py b/calculator.py
new file mode 100644
index 0000000..37bf39b
--- /dev/null
+++ b/calculator.py
@@ -0,0 +1,116 @@
from dataclasses import dataclass,make_dataclass

# A numeric literal token. The single field is named "_" so the value can
# be destructured positionally in match patterns: `case Number(n)`.
@dataclass
class Number:
    _: float  # Tokenize constructs Number(float(x)), so this is a float — original `int` annotation presumably unintended

# Field-less marker dataclasses, one per operator / punctuation symbol.
# make_dataclass('X', []) builds a class whose instances all compare
# equal, which is exactly what a value-less token needs for `case X()`.
ParenClose = make_dataclass('ParenClose',[])
ParenOpen = make_dataclass('ParenOpen',[])
AddOp = make_dataclass('AddOp',[])
SubOp = make_dataclass('SubOp',[])
MulOp = make_dataclass('MulOp',[])
DivOp = make_dataclass('DivOp',[])
PowOp = make_dataclass('PowOp',[])
NegOp = make_dataclass('NegOp',[])   # unary negation, spelled `~`
ModOp = make_dataclass('ModOp',[])   # `%`
FacOp = make_dataclass('FacOp',[])   # postfix factorial, spelled `!`
AvgOp = make_dataclass('AvgOp',[])   # `@` — average of the two operands
MaxOp = make_dataclass('MaxOp',[])   # `$`
MinOp = make_dataclass('MinOp',[])   # `&`

# Union of every token kind the lexer can emit and the parser can consume.
Token = Number | AddOp | SubOp | MulOp | DivOp | PowOp | NegOp | ModOp | FacOp | AvgOp | MaxOp | MinOp | ParenClose | ParenOpen
+
# Maps each single-character operator/punctuation symbol to its token
# class; Tokenize instantiates the class when it sees the character.
Characters = {
    '(': ParenOpen,
    ')': ParenClose,
    '+': AddOp,
    '-': SubOp,
    '*': MulOp,
    '/': DivOp,
    '^': PowOp,
    '~': NegOp,
    '%': ModOp,
    '!': FacOp,
    '@': AvgOp,
    '$': MaxOp,
    '&': MinOp
}
+
from io import BytesIO
from tokenize import ERRORTOKEN, NAME, NUMBER, OP, STRING, tokenize, untokenize
def Tokenize(s: str) -> list[Token]:
    """Split an expression string into calculator tokens.

    NUMBER tokens become Number(float) values; any operator/punctuation
    character listed in Characters becomes an instance of its token
    class. All other tokens (encoding marker, newlines, end marker) are
    discarded.
    """
    r: list[Token] = []
    for kind, text, *_ in tokenize(BytesIO(s.encode('utf-8')).readline):
        if kind == NUMBER:
            r.append(Number(float(text)))
        # Bug fix: the original matched hard-coded token type numbers
        # (54|60), which differ across CPython versions; use the named
        # constants instead. `$`, `&`, `!`, `~` come out as ERRORTOKEN,
        # the rest as OP.
        elif kind in (OP, ERRORTOKEN) and text in Characters:
            r.append(Characters[text]())
    return r
+
def Expression(e: list[Token]) -> tuple[float, list[Token]]:
    """Evaluate an expression: a chain of terms joined by + and -.

    Consumes tokens from the front of `e` (mutating the list in place)
    and returns the value together with the unconsumed remainder.
    Fixed annotations: the originals (`[Token]`, `(int, [Token])`) were
    list/tuple literals, not type expressions, and the values are floats.
    """
    result, e = Term(e)
    while e:
        op = e.pop(0)
        if isinstance(op, AddOp):
            operand, e = Term(e)
            result += operand
        elif isinstance(op, SubOp):
            operand, e = Term(e)
            result -= operand
        else:
            # Not an additive operator: push it back for the caller.
            e.insert(0, op)
            break
    return result, e
+
def Term(e: list[Token]) -> tuple[float, list[Token]]:
    """Evaluate a term: factors joined by the infix operators
    % (mod), @ (average), & (min), $ (max), * (mul), / (div) and
    ^ (pow), all at equal precedence, applied left to right.

    Consumes tokens from the front of `e` (mutating the list in place)
    and returns the value together with the unconsumed remainder.
    Fixed annotations: the originals were list/tuple literals, not type
    expressions, and the values are floats.
    """
    result, e = Factor(e)
    while e:
        op = e.pop(0)
        if isinstance(op, ModOp):
            operand, e = Factor(e)
            result %= operand
        elif isinstance(op, AvgOp):
            operand, e = Factor(e)
            result = (operand + result) / 2
        elif isinstance(op, MinOp):
            operand, e = Factor(e)
            result = min(operand, result)
        elif isinstance(op, MaxOp):
            operand, e = Factor(e)
            result = max(operand, result)
        elif isinstance(op, MulOp):
            operand, e = Factor(e)
            result *= operand
        elif isinstance(op, DivOp):
            operand, e = Factor(e)
            result /= operand
        elif isinstance(op, PowOp):
            operand, e = Factor(e)
            result **= operand
        else:
            # Not a term operator: push it back for the caller.
            e.insert(0, op)
            break
    return result, e
+
+def Factor(e: [Token]) -> (int, [Token]):
+    assert len(e) > 0
+    match e.pop(0):
+        case NegOp():
+            r, e = Factor(e)
+            r = -r
+        case Number(n):
+            r = n
+        case ParenOpen():
+            r, e = Expression(e)
+            assert isinstance(e.pop(0), ParenClose)
+    
+    if len(e) > 0 and isinstance(e[0], FacOp):
+        from math import factorial
+        e.pop(0)
+        r = float(factorial(int(r)))
+    return r, e
+
def Evaluate(x: str) -> float:
    """Tokenize and evaluate an expression string, returning its value.

    Like the original lambda, any tokens left over after the first
    complete expression are silently ignored.
    """
    # PEP 8 (E731): a def instead of a lambda bound to a name.
    value, _ = Expression(Tokenize(x))
    return value
\ No newline at end of file