# Scheme Tokenizer
def tokenize_scheme(string):
    tokens = []
    current_token = ""
    token_mode = False  # True while accumulating a multi-character token
    chars = iter(string)
    try:
        while True:
            char = next(chars)
            if token_mode:
                if char == " " or char == "\n":
                    # Whitespace ends the token being accumulated.
                    tokens.append(current_token)
                    current_token = ""
                    token_mode = False
                elif char == "(" or char == ")":
                    # A parenthesis also ends the current token,
                    # and is itself a token.
                    tokens.append(current_token)
                    tokens.append(char)
                    current_token = ""
                    token_mode = False
                else:
                    current_token += char
            else:
                if char != " " and char != "\n":
                    current_token += char
                    if char not in ["(", ")", "+", "-", "*", "/"]:
                        # Start of a multi-character token (e.g. a number).
                        token_mode = True
                    else:
                        # Parens and operators are single-character tokens.
                        tokens.append(current_token)
                        current_token = ""
    except StopIteration:
        # Flush a trailing token, if any, when the input runs out.
        if current_token:
            tokens.append(current_token)
    return tokens
"""
Tokenize_scheme("(+ 3 5)")
=> ['(', '+', '3', ')', '5']
Tokenize_scheme("(+ (- 3 6) 5)")
=> ['(', '+', '(', '-', '3', ')', '6', ')', '5']
"""