-
Notifications
You must be signed in to change notification settings - Fork 1
/
interpreter.py
77 lines (62 loc) · 2.29 KB
/
interpreter.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
import tokenize
from parser import Parser
from product_system import ProductSystem
from program import *
def main() -> None:
    """Interactive two-pass interpreter REPL for a small Quil-like language.

    Pass 1 reads instruction lines from stdin until EOF (Ctrl-D), storing
    each raw line in the Program and recording the position of any Label
    the line defines. Pass 2 re-lexes/re-parses each stored instruction and
    executes it against a ProductSystem simulator, then prints the program
    memory, state probabilities, and result.

    Note: ``tokenize`` here is a project-local module (it shadows the
    stdlib module of the same name) providing ``LexicalAnalyzer``.
    """
    print(
        """
 _ _ _ _
| | | | (_) | |
| | ___| | __ _ ___ ___ _ ___ __ _| |
| |/ __| |/ _` / __/ __| |/ __/ _` | |
|_| (__| | (_| \__ \__ \ | (_| (_| | |
(_)\___|_|\__,_|___/___/_|\___\__,_|_|
Welcome to the glorious !classical quil interpreter!
To exit, use Ctrl-D
"""
    )
    # NOTE(review): constructor argument is not documented here -- presumably
    # the initial number of qubits in the simulated system; confirm against
    # ProductSystem's definition.
    system = ProductSystem(1)
    program = Program()
    # --- Pass 1: read all instructions and set labels ---
    while True:
        try:
            text = input()
            if not text:
                continue  # ignore blank input lines
            # Lex/parse the line only to detect label definitions; the raw
            # text is what gets stored and re-parsed during execution.
            tokens = tokenize.LexicalAnalyzer(text).lex()
            parsed_object = Parser(tokens).parse()
            program.add_instruction(text)
            # A label at the start of a line marks a jump target position.
            if len(parsed_object) > 0 and isinstance(parsed_object[0], Label):
                program.set_label(parsed_object[0])
        except EOFError:
            break  # Ctrl-D: input phase ends, move on to execution
    # --- Pass 2: begin actual execution ---
    program.begin()
    while not program.is_complete():
        # Each stored instruction is lexed and parsed again at execution time.
        tokens = tokenize.LexicalAnalyzer(program.instruction()).lex()
        parsed_output = Parser(tokens).parse()
        for parsed_object in parsed_output:
            if isinstance(parsed_object, Label):
                # NOTE(review): `break` abandons any remaining parsed objects
                # on this line, not just the label; harmless if labels always
                # occupy a line of their own -- confirm against the Parser.
                break  # skip because labels are set on first pass
            elif isinstance(parsed_object, Variable):
                program.define_variable(parsed_object)
            elif isinstance(parsed_object, Jump):
                program.jump(parsed_object)
            elif parsed_object.name == "CNOT":
                # Two-qubit gate: apply CNOT to the control/target pair.
                system.multiGate(
                    "CNOT", parsed_object.qubit1.number, parsed_object.qubit2.number
                )
            elif parsed_object.name == "MEASURE":
                # Collapse the qubit and store the classical outcome in the
                # program's named variable.
                measurement = system.measure(parsed_object.qubit.number)
                program.set_variable(parsed_object.var, measurement)
            else:
                # Every other named instruction is treated as a single-qubit
                # gate applied to its one qubit operand.
                system.singleGate(parsed_object.name, parsed_object.qubit.number)
    # Final report once the program has run to completion.
    program.print_memory()
    system.print_probabilities()
    system.print_result()
# Script entry point: run the interpreter only when executed directly,
# not when this module is imported. (Removed stale commented-out scratch
# code that manually exercised the lexer/parser.)
if __name__ == "__main__":
    main()