@judofyr
Created November 27, 2024 19:31
Scrapscript reduced with C-reduce against RustPython
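For context, a C-Reduce run like this pairs the file below with an interestingness test that C-Reduce invokes on every candidate reduction. The gist does not say what failure was being chased, so the script below is only a minimal sketch: the file name scrapscript.py, a rustpython binary on PATH, and the "panicked" marker string are all assumptions standing in for the real setup.

#!/usr/bin/env python3
# interesting.py -- hypothetical interestingness test for C-Reduce.
# Exits 0 ("interesting") only while RustPython still shows the failure
# being chased on the candidate file; anything else rejects the reduction.
import subprocess
import sys

CANDIDATE = "scrapscript.py"   # file C-Reduce rewrites on every pass
MARKER = b"panicked"           # placeholder: the real failure text is not given in the gist

try:
    proc = subprocess.run(["rustpython", CANDIDATE], capture_output=True, timeout=60)
except subprocess.TimeoutExpired:
    sys.exit(1)                # hangs are not the failure being reduced
sys.exit(0 if MARKER in proc.stderr else 1)

C-Reduce would then be started as creduce ./interesting.py scrapscript.py and keeps shrinking the file for as long as the test still exits 0; the reduced result is the file below.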
from __future__ import annotations
import argparse
import base64
import code
import dataclasses
import enum
import functools
import json
import logging
import os
import re
import struct
import sys
import typing
import unittest
import urllib.request
from dataclasses import dataclass
from enum import auto
from types import ModuleType
from typing import Any, Callable, Dict, Mapping, Optional, Set, Tuple, Union
class Token:
lineno: int = dataclasses.field(default=-1, init=False, compare=False)
@dataclass(eq=True)
class IntLit(Token):
value: int
@dataclass(eq=True)
@dataclass(eq=True)
class Juxt(Token):
# The space between other tokens that indicates function application.
pass
class Lexer:
def __init__(self, text: str):
self.text: str = text
self.idx: int = 0
self.lineno: int = 1
self.colno: int = 1
self.line: str = ""
def has_input(self) -> bool:
return self.idx < len(self.text)
def read_char(self) -> str:
c = self.peek_char()
if c == "\n":
self.lineno += 1
self.colno = 1
self.colno += 1
self.idx += 1
return c
def peek_char(self) -> str:
if not self.has_input():
raise UnexpectedEOFError("while reading token")
return self.text[self.idx]
def make_token(self, cls: type, *args: Any) -> Token:
result: Token = cls(*args)
def read_one(self) -> Token:
while self.has_input():
c = self.read_char()
if not c.isspace():
break
else:
return self.make_token(EOF)
return self.make_token(IntLit, int(buf))
def _starts_operator(self, buf: str) -> bool:
c = self.peek_char()
def tokenize(x: str) -> typing.List[Token]:
lexer = Lexer(x)
while (token := lexer.read_one()) and not isinstance(token, EOF):
tokens.append(token)
PS = {
}
class ParseError(SyntaxError):
pass
def parse_assign(tokens: typing.List[Token], p: float = 0) -> "Assign":
return Assign(Var("..."), assign)
def parse(tokens: typing.List[Token], p: float = 0) -> "Object":
if not tokens:
raise UnexpectedEOFError("unexpected end of input")
while True:
break
def __str__(self) -> str:
return {
}[binop_kind]
BINOP_HANDLERS: Dict[BinopKind, Callable[[Env, Object, Object], Object]] = {
}
class MatchError(Exception):
pass
def match(obj: Object, pattern: Object) -> Optional[Env]:
return {} if isinstance(obj, Hole) else None
def eval_exp(env: Env, exp: Object) -> Object:
with self.assertRaisesRegex(UnexpectedEOFError, "while reading string"):
tokenize('"hello')
self.assertEqual(
parse(
[
]
),
)
with self.assertRaisesRegex(ParseError, re.escape("spread must come at end of list match")):
parse(
[
]
)
self.assertEqual(
match(
),
match(
),
)
self.assertEqual(
match(
),
)
self.assertEqual(
match(
),
)
self.assertEqual(
match(
),
)
self.assertEqual(
match(
),
)
self.assertEqual(
[
],
)
text = """
"""
self.assertEqual(
MatchFunction(
[
MatchCase(
Apply(
),
),
]
),
)
self.assertEqual(
match(
),
)
self.assertEqual(
match(
),
)
self.assertEqual(
self._run(
"""
"""
"""
"""
),
)
self.assertEqual(
self._run(
"""
"""
),
)
with self.assertRaises(MatchError):
self._run(
"""
"""
)
self.assertEqual(
self._run(
"""
"""
),
)
self.assertEqual(
self._run(
"""
"""
),
)
self.assertEqual(
self._run(
"""
"""
),
)
self.assertEqual(
self._run(
"""
"""
),
)
with self.assertRaises(TypeError):
self._run(
"""
"""
)
self.assertEqual(
self._run(
),
)
with self.assertRaises(TypeError):
self._run(
)
self._run(
"""
"""
)
self.assertEqual(
self._run(
"""
"""
),
)
with self.assertRaises(TypeError):
self._run(
"""
"""
)
self.assertEqual(
self._run(
),
)
self.assertEqual(
self._run(
),
)
with self.assertRaises(TypeError):
self._run(
)
self._run(
"""
"""
)
self.assertEqual(
self._run(
"""
"""
),
)
self.assertEqual(
self._run(
"""
"""
),
)
self.assertEqual(
self._run(
"""
"""
),
)
self.assertEqual(
self._run(
"""
"""
),
)
with self.assertRaises(TypeError):
self._run(
"""
"""
)
self.assertEqual(
self._run(
),
)
while isinstance(result, TyVar):
return result
obj = MatchFunction(
[
]
)
STDLIB = {
}
PRELUDE = """
id = x -> x
"""
def boot_env() -> Env:
env_object = eval_exp(STDLIB, parse(tokenize(PRELUDE)))
def __init__(self, env: Env) -> None:
if state == 0:
self.matches = options[:]
try:
return self.matches[state]
except IndexError:
return None
class ScrapRepl(code.InteractiveConsole):
self.env: Env = boot_env()
try:
tokens = tokenize(source)
except Exception as e:
print(f"Error: {e}", file=sys.stderr)
def repl_command(args: argparse.Namespace) -> None:
if args.debug:
logging.basicConfig(level=logging.DEBUG)
repl = ScrapRepl()
if readline:
repl.enable_readline()
OP_ENV = {
}
def compile_command(args: argparse.Namespace) -> None:
args.compile = True
def main() -> None:
parser = argparse.ArgumentParser(prog="scrapscript")
subparsers = parser.add_subparsers(dest="command")
args = parser.parse_args()
if not args.command:
args.debug = False
repl_command(args)
if __name__ == "__main__":
main()