-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathrun.py
1936 lines (1636 loc) · 74.5 KB
/
run.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# concat pg_encoder.py, pg_logger.py, and generate_json_trace.py in
# front of our own code to simplify loading to pyodide.
# Online Python Tutor
# https://github.com/pgbovine/OnlinePythonTutor/
#
# Copyright (C) Philip J. Guo (philip@pgbovine.net)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Thanks to John DeNero for making the encoder work on both Python 2 and 3
# (circa 2012-2013)
# Given an arbitrary piece of Python data, encode it in such a manner
# that it can be later encoded into JSON.
# http://json.org/
#
# We use this function to encode run-time traces of data structures
# to send to the front-end.
#
# Format:
# Primitives:
# * None, int, long, float, str, bool - unchanged
# (json.dumps encodes these fine verbatim, except for inf, -inf, and nan)
#
# exceptions: float('inf') -> ['SPECIAL_FLOAT', 'Infinity']
# float('-inf') -> ['SPECIAL_FLOAT', '-Infinity']
# float('nan') -> ['SPECIAL_FLOAT', 'NaN']
# x == int(x) -> ['SPECIAL_FLOAT', '%.1f' % x]
# (this way, 3.0 prints as '3.0' and not as 3, which looks like an int)
#
# If render_heap_primitives is True, then primitive values are rendered
# on the heap as ['HEAP_PRIMITIVE', <type name>, <value>]
#
# (for SPECIAL_FLOAT values, <value> is a list like ['SPECIAL_FLOAT', 'Infinity'])
#
# added on 2018-06-13:
# ['IMPORTED_FAUX_PRIMITIVE', <label>] - renders externally imported objects
# like they were primitives, to save
# space and to prevent from having to
# recurse into of them to see internals
#
# Compound objects:
# * list - ['LIST', elt1, elt2, elt3, ..., eltN]
# * tuple - ['TUPLE', elt1, elt2, elt3, ..., eltN]
# * set - ['SET', elt1, elt2, elt3, ..., eltN]
# * dict - ['DICT', [key1, value1], [key2, value2], ..., [keyN, valueN]]
# * instance - ['INSTANCE', class name, [attr1, value1], [attr2, value2], ..., [attrN, valueN]]
# * instance with non-trivial __str__ defined - ['INSTANCE_PPRINT', class name, <__str__ value>, [attr1, value1], [attr2, value2], ..., [attrN, valueN]]
# * class - ['CLASS', class name, [list of superclass names], [attr1, value1], [attr2, value2], ..., [attrN, valueN]]
# * function - ['FUNCTION', function name, parent frame ID (for nested functions),
# [*OPTIONAL* list of pairs of default argument names/values] ] <-- final optional element added on 2018-06-13
# * module - ['module', module name]
# * other - [<type name>, string representation of object]
# * compound object reference - ['REF', target object's unique_id]
#
# the unique_id is derived from id(), which allows us to capture aliasing
# number of significant digits for floats
FLOAT_PRECISION = 4
from collections import defaultdict
import re, types
import sys
import math
typeRE = re.compile("<type '(.*)'>")
classRE = re.compile("<class '(.*)'>")
import inspect
# TODO: maybe use the 'six' library to smooth over Py2 and Py3 incompatibilities?
is_python3 = sys.version_info[0] == 3
assert is_python3
# avoid name errors (GROSS!)
long = int
unicode = str
def is_class(dat):
    """Return whether *dat* is itself a class object.

    In Python 3 every class, built-in or user-defined, is an instance
    of ``type``.
    """
    return isinstance(dat, type)
def is_instance(dat):
    """Return whether *dat* is an instance of a class.

    Excludes primitives (they're rendered separately) and class objects
    themselves (those are handled by is_class).
    """
    t = type(dat)
    if t in PRIMITIVE_TYPES:
        return False
    # dat's type must itself be a class, and dat must not be a class
    return isinstance(t, type) and not isinstance(dat, type)
def get_name(obj):
    """Return the name of an object, falling back to its type's name."""
    try:
        return obj.__name__
    except AttributeError:
        # obj has no __name__ (e.g., it's an instance); use its type's name
        return get_name(type(obj))
PRIMITIVE_TYPES = (int, long, float, str, unicode, bool, type(None))
def encode_primitive(dat):
    """Encode a primitive value in a JSON-safe way.

    Floats get special treatment: inf/-inf/nan become SPECIAL_FLOAT
    markers, whole-number floats keep a visible '.0' suffix, and all
    other floats are rounded to FLOAT_PRECISION digits. Every other
    primitive passes through verbatim.
    """
    if type(dat) is not float:
        # non-floats are JSON-safe as-is
        return dat
    if math.isinf(dat):
        return ["SPECIAL_FLOAT", "Infinity" if dat > 0 else "-Infinity"]
    if math.isnan(dat):
        return ["SPECIAL_FLOAT", "NaN"]
    if dat == int(dat):
        # render floats like 3.0 as '3.0' so they don't look like ints
        return ["SPECIAL_FLOAT", "%.1f" % dat]
    return round(dat, FLOAT_PRECISION)
# grab a line number like ' <line 2>' or ' <line 2b>'
def create_lambda_line_number(codeobj, line_to_lambda_code):
    """Return a ' <line N>' suffix identifying where a lambda was defined.

    codeobj is the lambda's code object; line_to_lambda_code maps a line
    number to the list of lambda code objects defined on that line, in
    definition order. Returns '' if the lambda can't be located (fail soft).
    """
    try:
        lambda_lineno = codeobj.co_firstlineno
        lst = line_to_lambda_code[lambda_lineno]
        ind = lst.index(codeobj)
        # add a suffix for all subsequent lambdas on a line beyond the first
        # (nix this for now because order isn't guaranteed when you have
        # multiple lambdas on the same line)
        """
        if ind > 0:
            lineno_str = str(lambda_lineno) + chr(ord('a') + ind)
        else:
            lineno_str = str(lambda_lineno)
        """
        lineno_str = str(lambda_lineno)
        return " <line " + lineno_str + ">"
    except Exception:
        # fail soft; don't let a bookkeeping miss break encoding
        return ""
# Note that this might BLOAT MEMORY CONSUMPTION since we're holding on
# to every reference ever created by the program without ever releasing
# anything!
class ObjectEncoder:
    """Encodes arbitrary Python values into the JSON-ready trace format
    described in this file's header comments.

    Compound objects are stored in self.encoded_heap_objects under a
    small canonical ID; encode() returns either an encoded primitive or
    a ['REF', small_id] reference into that heap. The id()-to-small-ID
    map persists for the whole run so aliasing stays visible.
    """

    def __init__(self, parent):
        self.parent = parent  # should be a PGLogger object
        # Key: canonicalized small ID
        # Value: encoded (compound) heap object
        self.encoded_heap_objects = {}
        self.render_heap_primitives = parent.render_heap_primitives
        # maps id(obj) -> stable small integer ID (never reset, so
        # references are consistent across trace steps)
        self.id_to_small_IDs = {}
        self.cur_small_ID = 1
        # wow, creating unique identifiers for lambdas is quite annoying,
        # especially if we want to properly differentiate:
        # 1.) multiple lambdas defined on the same line, and
        # 2.) the same lambda code defined multiple times on different lines
        #
        # However, it gets confused when there are multiple identical
        # lambdas on the same line, like:
        # f(lambda x:x*x, lambda y:y*y, lambda x:x*x)
        # (assumes everything is in one file)
        # Key: line number
        # Value: list of the code objects of lambdas defined
        # on that line in the order they were defined
        self.line_to_lambda_code = defaultdict(list)

    def should_hide_var(self, var):
        """Delegate to the parent PGLogger's #pythontutor_hide rules."""
        return self.parent.should_hide_var(var)

    # searches through self.parents.types_to_inline and tries
    # to match the type returned by type(obj).__name__ and
    # also 'class' and 'instance' for classes and instances, respectively
    def should_inline_object_by_type(self, obj):
        """Return True if obj's type name matches a user 'inline type' pattern."""
        # fast-pass optimization -- common case
        if not self.parent.types_to_inline:
            return False
        # copy-pasted from the end of self.encode()
        typ = type(obj)
        typename = typ.__name__
        # pick up built-in functions too:
        if typ in (
            types.FunctionType,
            types.MethodType,
            types.BuiltinFunctionType,
            types.BuiltinMethodType,
        ):
            typename = "function"
        if not typename:
            return False
        alt_typename = None
        if is_class(obj):
            alt_typename = "class"
        elif is_instance(obj) and typename != "function":
            # if obj is an instance of the Fooo class, then we want to match
            # on both 'instance' and 'Fooo'
            # (exception: 'function' objects are sometimes also instances,
            # but we still want to call them 'function', so ignore them)
            typename = "instance"
            class_name = None
            if hasattr(obj, "__class__"):
                # common case ...
                class_name = get_name(obj.__class__)
            else:
                # super special case for something like
                # "from datetime import datetime_CAPI" in Python 3.2,
                # which is some weird 'PyCapsule' type ...
                # http://docs.python.org/release/3.1.5/c-api/capsule.html
                class_name = get_name(type(obj))
            alt_typename = class_name
        for re_match in self.parent.types_to_inline:
            if re_match(typename):
                return True
            if alt_typename and re_match(alt_typename):
                return True
        return False

    def get_heap(self):
        """Return the current dict of encoded heap objects (small ID -> encoding)."""
        return self.encoded_heap_objects

    def reset_heap(self):
        """Start a fresh heap snapshot for the next trace step."""
        # VERY IMPORTANT to reassign to an empty dict rather than just
        # clearing the existing dict, since get_heap() could have been
        # called earlier to return a reference to a previous heap state
        self.encoded_heap_objects = {}

    def set_function_parent_frame_ID(self, ref_obj, enclosing_frame_id):
        """Patch an already-encoded FUNCTION heap object's parent frame ID in place."""
        assert ref_obj[0] == "REF"
        func_obj = self.encoded_heap_objects[ref_obj[1]]
        assert func_obj[0] == "FUNCTION"
        func_obj[-1] = enclosing_frame_id

    # return either a primitive object or an object reference;
    # and as a side effect, update encoded_heap_objects
    def encode(self, dat, get_parent):
        """Encode a data value DAT using the GET_PARENT function for parent ids."""
        # primitive type
        if not self.render_heap_primitives and type(dat) in PRIMITIVE_TYPES:
            return encode_primitive(dat)
        # compound type - return an object reference and update encoded_heap_objects
        else:
            # IMPORTED_FAUX_PRIMITIVE feature added on 2018-06-13:
            is_externally_defined = (
                False  # is dat defined in external (i.e., non-user) code?
            )
            try:
                # some objects don't return anything for getsourcefile() but DO return
                # something legit for getmodule(). e.g., "from io import StringIO"
                # so TRY getmodule *first* and then fall back on getsourcefile
                # since getmodule seems more robust empirically ...
                gsf = inspect.getmodule(dat).__file__
                if not gsf:
                    gsf = inspect.getsourcefile(dat)
                # a hacky heuristic is that if gsf is an absolute path, then it's likely
                # to be some library function and *not* in user-defined code
                #
                # NB: don't use os.path.isabs() since it doesn't work on some
                # python installations (e.g., on my webserver) and also adds a
                # dependency on the os module. just do a simple check:
                #
                # hacky: do other checks for strings that are indicative of files
                # that load user-written code, like 'generate_json_trace.py'
                if gsf and gsf[0] == "/" and "generate_json_trace.py" not in gsf:
                    is_externally_defined = True
            except (AttributeError, TypeError):
                pass  # fail soft
            my_id = id(dat)
            # if dat is an *real* object instance (and not some special built-in one
            # like ABCMeta, or a py3 function object), then DON'T treat it as
            # externally-defined because a user might be instantiating an *instance*
            # of an imported class in their own code, so we want to show that instance
            # in the visualization - ugh #hacky
            if (
                is_instance(dat)
                and type(dat)
                not in (
                    types.FunctionType,
                    types.MethodType,
                    types.BuiltinFunctionType,
                    types.BuiltinMethodType,
                )
                and hasattr(dat, "__class__")
                and (get_name(dat.__class__) != "ABCMeta")
            ):
                is_externally_defined = False
            # if this is an externally-defined object (i.e., from an imported
            # module, don't try to recurse into it since we don't want to see
            # the internals of imported objects; just return an
            # IMPORTED_FAUX_PRIMITIVE object and continue along on our way
            if is_externally_defined:
                label = "object"
                try:
                    label = type(dat).__name__
                    if is_class(dat):
                        label = "class"
                    elif is_instance(dat):
                        label = "object"
                except:
                    pass
                return ["IMPORTED_FAUX_PRIMITIVE", "imported " + label]  # punt early!
            # next check whether it should be inlined
            if self.should_inline_object_by_type(dat):
                label = "object"
                try:
                    label = type(dat).__name__
                    if is_class(dat):
                        class_name = get_name(dat)
                        label = class_name + " class"
                    elif is_instance(dat):
                        # a lot of copy-pasta from other parts of this file:
                        # TODO: clean up
                        class_name = None
                        if hasattr(dat, "__class__"):
                            # common case ...
                            class_name = get_name(dat.__class__)
                        else:
                            # super special case for something like
                            # "from datetime import datetime_CAPI" in Python 3.2,
                            # which is some weird 'PyCapsule' type ...
                            # http://docs.python.org/release/3.1.5/c-api/capsule.html
                            class_name = get_name(type(dat))
                        if class_name:
                            label = class_name + " instance"
                        else:
                            label = "instance"
                except:
                    pass
                return ["IMPORTED_FAUX_PRIMITIVE", label + " (hidden)"]  # punt early!
            try:
                my_small_id = self.id_to_small_IDs[my_id]
            except KeyError:
                my_small_id = self.cur_small_ID
                self.id_to_small_IDs[my_id] = self.cur_small_ID
                self.cur_small_ID += 1
            del my_id  # to prevent bugs later in this function
            ret = ["REF", my_small_id]
            # punt early if you've already encoded this object
            if my_small_id in self.encoded_heap_objects:
                return ret
            # major side-effect! register new_obj in the heap BEFORE
            # recursing, so cyclic structures terminate via the
            # already-encoded check above
            new_obj = []
            self.encoded_heap_objects[my_small_id] = new_obj
            typ = type(dat)
            if typ == list:
                new_obj.append("LIST")
                for e in dat:
                    new_obj.append(self.encode(e, get_parent))
            elif typ == tuple:
                new_obj.append("TUPLE")
                for e in dat:
                    new_obj.append(self.encode(e, get_parent))
            elif typ == set:
                new_obj.append("SET")
                for e in dat:
                    new_obj.append(self.encode(e, get_parent))
            elif typ == dict:
                new_obj.append("DICT")
                for k, v in dat.items():
                    # don't display some built-in locals ...
                    if k not in ("__module__", "__return__", "__locals__"):
                        new_obj.append(
                            [self.encode(k, get_parent), self.encode(v, get_parent)]
                        )
            elif typ in (types.FunctionType, types.MethodType):
                argspec = inspect.getfullargspec(dat)
                printed_args = [e for e in argspec.args]
                default_arg_names_and_vals = []
                if argspec.defaults:
                    num_missing_defaults = len(printed_args) - len(argspec.defaults)
                    assert num_missing_defaults >= 0
                    # tricky tricky tricky how default positional arguments work!
                    # defaults align with the TAIL of printed_args
                    for i in range(num_missing_defaults, len(printed_args)):
                        default_arg_names_and_vals.append(
                            (
                                printed_args[i],
                                self.encode(
                                    argspec.defaults[i - num_missing_defaults],
                                    get_parent,
                                ),
                            )
                        )
                if argspec.varargs:
                    printed_args.append("*" + argspec.varargs)
                # kwonlyargs come before varkw
                if argspec.kwonlyargs:
                    printed_args.extend(argspec.kwonlyargs)
                    if argspec.kwonlydefaults:
                        # iterate in order of appearance in kwonlyargs
                        for varname in argspec.kwonlyargs:
                            if varname in argspec.kwonlydefaults:
                                val = argspec.kwonlydefaults[varname]
                                default_arg_names_and_vals.append(
                                    (varname, self.encode(val, get_parent))
                                )
                if argspec.varkw:
                    printed_args.append("**" + argspec.varkw)
                func_name = get_name(dat)
                pretty_name = func_name
                # sometimes might fail for, say, <genexpr>, so just ignore
                # failures for now ...
                try:
                    pretty_name += "(" + ", ".join(printed_args) + ")"
                except TypeError:
                    pass
                # put a line number suffix on lambdas to more uniquely identify
                # them, since they don't have names
                if func_name == "<lambda>":
                    cod = dat.__code__
                    lst = self.line_to_lambda_code[cod.co_firstlineno]
                    if cod not in lst:
                        lst.append(cod)
                    pretty_name += create_lambda_line_number(
                        cod, self.line_to_lambda_code
                    )
                encoded_val = ["FUNCTION", pretty_name, None]
                if get_parent:
                    enclosing_frame_id = get_parent(dat)
                    encoded_val[2] = enclosing_frame_id
                new_obj.extend(encoded_val)
                # OPTIONAL!!!
                if default_arg_names_and_vals:
                    new_obj.append(
                        default_arg_names_and_vals
                    )  # *append* it as a single list element
            elif typ is types.BuiltinFunctionType:
                pretty_name = get_name(dat) + "(...)"
                new_obj.extend(["FUNCTION", pretty_name, None])
            elif is_class(dat) or is_instance(dat):
                self.encode_class_or_instance(dat, new_obj)
            elif typ is types.ModuleType:
                new_obj.extend(["module", dat.__name__])
            elif typ in PRIMITIVE_TYPES:
                # only reachable when render_heap_primitives is on (see top of encode)
                assert self.render_heap_primitives
                new_obj.extend(
                    ["HEAP_PRIMITIVE", type(dat).__name__, encode_primitive(dat)]
                )
            else:
                # fallback: '<type/class NAME>' string repr parsed for the type name
                typeStr = str(typ)
                m = typeRE.match(typeStr)
                if not m:
                    m = classRE.match(typeStr)
                assert m, typ
                encoded_dat = str(dat)
                new_obj.extend([m.group(1), encoded_dat])
            return ret

    def encode_class_or_instance(self, dat, new_obj):
        """Encode dat as a class or instance, appending fields into new_obj."""
        if is_instance(dat):
            if hasattr(dat, "__class__"):
                # common case ...
                class_name = get_name(dat.__class__)
            else:
                # super special case for something like
                # "from datetime import datetime_CAPI" in Python 3.2,
                # which is some weird 'PyCapsule' type ...
                # http://docs.python.org/release/3.1.5/c-api/capsule.html
                class_name = get_name(type(dat))
            pprint_str = None
            # do you or any of your superclasses have a __str__ field? if so, pretty-print yourself!
            if hasattr(dat, "__str__"):
                try:
                    pprint_str = dat.__str__()
                    # sometimes you'll get 'trivial' pprint_str like: '<__main__.MyObj object at 0x10f465cd0>'
                    # or '<module 'collections' ...'
                    # IGNORE THOSE!!!
                    if (
                        pprint_str[0] == "<"
                        and pprint_str[-1] == ">"
                        and (" at " in pprint_str or pprint_str.startswith("<module"))
                    ):
                        pprint_str = None
                except:
                    pass
            # TODO: filter for trivial-looking pprint_str like those produced
            # by object.__str__
            if pprint_str:
                new_obj.extend(["INSTANCE_PPRINT", class_name, pprint_str])
            else:
                new_obj.extend(["INSTANCE", class_name])
            # don't traverse inside modules, or else risk EXPLODING the visualization
            if class_name == "module":
                return
        else:
            superclass_names = [e.__name__ for e in dat.__bases__ if e is not object]
            new_obj.extend(["CLASS", get_name(dat), superclass_names])
        # traverse inside of its __dict__ to grab attributes
        # (filter out useless-seeming ones, based on anecdotal observation):
        hidden = (
            "__doc__",
            "__module__",
            "__return__",
            "__dict__",
            "__locals__",
            "__weakref__",
            "__qualname__",
        )
        if hasattr(dat, "__dict__"):
            user_attrs = sorted([e for e in dat.__dict__ if e not in hidden])
        else:
            user_attrs = []
        for attr in user_attrs:
            if not self.should_hide_var(attr):
                new_obj.append(
                    [self.encode(attr, None), self.encode(dat.__dict__[attr], None)]
                )
# Generates a JSON trace that is compatible with the js/pytutor.ts frontend
import sys, json
import sys
import bdb # the KEY import here!
import re
import traceback
import types
# TODO: use the 'six' package to smooth out Py2 and Py3 differences
is_python3 = sys.version_info[0] == 3
import io
# upper-bound on the number of executed lines, in order to guard against
# infinite loops
# MAX_EXECUTED_LINES = 300
MAX_EXECUTED_LINES = 1000 # on 2016-05-01, I increased the limit from 300 to 1000 for Python due to popular user demand! and I also improved the warning message
# DEBUG = False
DEBUG = True
BREAKPOINT_STR = "#break"
# if a line starts with this string, then look for a comma-separated
# list of variables after the colon. *hide* those variables in da trace
#
# 2018-06-17:
# - now supports unix-style shell globs using the syntax in
# https://docs.python.org/3/library/fnmatch.html so you can write things
# like '#pythontutor_hide: _*' to hide all private instance variables
# - also now filters class and instance fields in addition to top-level vars
PYTUTOR_HIDE_STR = "#pythontutor_hide:"
# 2018-06-17: a comma-separated list of types that should be displayed *inline*
# like primitives, with their actual values HIDDEN to save space. for details
# of what types are legal to specify, see:
# pg_encoder.py:should_inline_object_by_type()
# - also accepts shell globs, just like PYTUTOR_HIDE_STR
PYTUTOR_INLINE_TYPE_STR = "#pythontutor_hide_type:"
CLASS_RE = re.compile("class\s+")
# copy-pasted from translate() in https://github.com/python/cpython/blob/2.7/Lib/fnmatch.py
def globToRegex(pat):
    """Translate a shell PATTERN to a regular expression string.

    There is no way to quote meta-characters.
    (adapted from translate() in CPython's Lib/fnmatch.py)

    Fix: the original returned the pattern with a TRAILING '\\Z(?ms)'.
    Inline global flags must appear at the *start* of a pattern; placing
    them elsewhere was deprecated in Python 3.6 and became a hard
    re.error in Python 3.11, so the flags are now prepended. The
    compiled behavior is unchanged.
    """
    i, n = 0, len(pat)
    res = ""
    while i < n:
        c = pat[i]
        i = i + 1
        if c == "*":
            res = res + ".*"
        elif c == "?":
            res = res + "."
        elif c == "[":
            j = i
            if j < n and pat[j] == "!":
                j = j + 1
            if j < n and pat[j] == "]":
                j = j + 1
            while j < n and pat[j] != "]":
                j = j + 1
            if j >= n:
                # unterminated character class: treat '[' literally
                res = res + "\\["
            else:
                stuff = pat[i:j].replace("\\", "\\\\")
                i = j + 1
                if stuff[0] == "!":
                    # shell '!' negation -> regex '^' negation
                    stuff = "^" + stuff[1:]
                elif stuff[0] == "^":
                    stuff = "\\" + stuff
                res = "%s[%s]" % (res, stuff)
        else:
            res = res + re.escape(c)
    return "(?ms)" + res + r"\Z"
def compileGlobMatch(pattern):
    """Compile a shell glob into a bound regex .match callable.

    very important to use match and *not* search!
    """
    compiled = re.compile(globToRegex(pattern))
    return compiled.match
# test globToRegex and compileGlobMatch
"""
for e in ('_*', '__*', '__*__', '*_$'):
stuff = compileGlobMatch(e)
for s in ('_test', 'test_', '_test_', '__test', '__test__'):
print(e, s, stuff(s) is not None)
"""
# From http://coreygoldberg.blogspot.com/2009/05/python-redirect-or-turn-off-stdout-and.html
class NullDevice:
    """A write-only sink that silently discards all output (stdout stand-in)."""

    def write(self, s):
        # swallow everything; file-like objects' write() returns None here
        return None
assert type(__builtins__) is types.ModuleType
BUILTIN_IMPORT = __builtins__.__import__
# Support interactive user input by:
#
# 1. running the entire program up to a call to raw_input (or input in py3),
# 2. bailing and returning a trace ending in a special 'raw_input' event,
# 3. letting the web frontend issue a prompt to the user to grab a string,
# 4. RE-RUNNING the whole program with that string added to input_string_queue,
# 5. which should bring execution to the next raw_input call (if
# available), or to termination.
# Repeat until no more raw_input calls are encountered.
# Note that this is mad inefficient, but is simple to implement!
# VERY IMPORTANT -- set random seed to 0 to ensure deterministic execution:
import random
random.seed(0)
# queue of input strings passed from either raw_input or mouse_input
# input_string_queue = []
class RawInputException(Exception):
    """Raised to bail out of execution when raw_input/input needs a value
    that isn't queued yet (the frontend then prompts the user)."""
def raw_input_wrapper(prompt=""):
    """Replacement for input()/raw_input() that reads from a queued string.

    Pops the next string from raw_input_wrapper.input_string_queue,
    echoes the prompt and the input to stdout (emulating what the user
    would see at a terminal), and returns the input. Returns None when
    the queue is empty.

    Fix: removed a leftover debug print() of the whole queue, which was
    polluting the user's captured stdout on every call.
    """
    # global input_string_queue
    if raw_input_wrapper.input_string_queue:
        input_str = raw_input_wrapper.input_string_queue.pop(0)
        # write the prompt and user input to stdout, to emulate what happens
        # at the terminal
        sys.stdout.write(str(prompt))  # always convert prompt into a string
        sys.stdout.write(input_str + "\n")  # newline to simulate the user hitting Enter
        return input_str
    # raise RawInputException(str(prompt)) # always convert prompt into a string
class MouseInputException(Exception):
    """Raised when a mouse-input value is needed but none is queued."""
# def mouse_input_wrapper(prompt=""):
# if input_string_queue:
# return input_string_queue.pop(0)
# raise MouseInputException(prompt)
IGNORE_VARS = set(
("__builtins__", "__name__", "__exception__", "__doc__", "__package__")
)
# at_global_scope should be true only if 'frame' represents the global scope
def get_user_globals(frame, at_global_scope=False):
    """Return frame.f_globals with bookkeeping names filtered out.

    at_global_scope should be true only if 'frame' represents the
    global scope (currently unused here, kept for interface stability).
    """
    filtered = filter_var_dict(frame.f_globals)
    # filter out __return__ for globals only, but NOT for locals
    filtered.pop("__return__", None)
    return filtered
def get_user_locals(frame):
    """Return frame.f_locals with bookkeeping names filtered out.

    For list/set/dict comprehensions (which may run in their own frames
    with an f_valuestack attribute — presumably interpreter-dependent),
    also expose the partially-built containers from the value stack as
    _tmp1, _tmp2, ... so they render incrementally.
    """
    ret = filter_var_dict(frame.f_locals)
    # special printing of list/set/dict comprehension objects as they are
    # being built up incrementally ...
    func_name = frame.f_code.co_name
    if hasattr(frame, "f_valuestack") and func_name.endswith("comp>"):
        partials = [v for v in frame.f_valuestack if type(v) in (list, set, dict)]
        for idx, partial in enumerate(partials):
            ret["_tmp" + str(idx + 1)] = partial
    return ret
def filter_var_dict(d):
    """Return a copy of d without the IGNORE_VARS bookkeeping entries."""
    return {name: val for name, val in d.items() if name not in IGNORE_VARS}
# yield all function objects locally-reachable from frame,
# making sure to traverse inside all compound objects ...
def visit_all_locally_reachable_function_objs(frame):
    """Yield every function object reachable from frame's locals,
    traversing inside compound objects along the way."""
    for _, val in get_user_locals(frame).items():
        for candidate in visit_function_obj(val, set()):
            # visit_function_obj yields None for non-function leaves
            if candidate:
                assert type(candidate) in (types.FunctionType, types.MethodType)
                yield candidate
# TODO: this might be slow if we're traversing inside lots of objects:
def visit_function_obj(v, ids_seen_set):
    """Recursively yield function/method objects reachable from v.

    Non-function leaves yield None (callers filter those out).
    ids_seen_set accumulates id()s of visited objects so shared or
    cyclic structures are traversed only once.
    """
    v_id = id(v)
    # to prevent infinite loop
    if v_id in ids_seen_set:
        yield None
    else:
        ids_seen_set.add(v_id)
        typ = type(v)
        # simple base case
        if typ in (types.FunctionType, types.MethodType):
            yield v
        # recursive cases
        elif typ in (list, tuple, set):
            for child in v:
                for child_res in visit_function_obj(child, ids_seen_set):
                    yield child_res
        elif typ == dict or is_class(v) or is_instance(v):
            contents_dict = None
            if typ == dict:
                contents_dict = v
            # warning: some classes or instances don't have __dict__ attributes
            elif hasattr(v, "__dict__"):
                contents_dict = v.__dict__
            if contents_dict:
                for key_child, val_child in contents_dict.items():
                    for key_child_res in visit_function_obj(key_child, ids_seen_set):
                        yield key_child_res
                    for val_child_res in visit_function_obj(val_child, ids_seen_set):
                        yield val_child_res
        # degenerate base case
        yield None
class PGLogger(bdb.Bdb):
# if custom_modules is non-empty, it should be a dict mapping module
# names to the python source code of each module. when _runscript is
# called, it will do "from <module> import *" for all modules in
# custom_modules before running the user's script and then trace all
# code within custom_modules
#
# if separate_stdout_by_module, then have a separate stdout stream
# for each module rather than all stdout going to a single stream
def __init__(
self,
cumulative_mode,
heap_primitives,
show_only_outputs,
finalizer_func,
disable_security_checks=False,
allow_all_modules=False,
custom_modules=None,
separate_stdout_by_module=False,
probe_exprs=None,
):
bdb.Bdb.__init__(self)
self.mainpyfile = ""
self._wait_for_mainpyfile = 0
if probe_exprs:
self.probe_exprs = probe_exprs
else:
self.probe_exprs = None
self.separate_stdout_by_module = separate_stdout_by_module
self.stdout_by_module = {} # Key: module name, Value: StringIO faux-stdout
self.modules_to_trace = set(["__main__"]) # always trace __main__!
# Key: module name
# Value: module's python code as a string
self.custom_modules = custom_modules
if self.custom_modules:
for module_name in self.custom_modules:
self.modules_to_trace.add(module_name)
self.disable_security_checks = disable_security_checks
self.allow_all_modules = allow_all_modules
# if we allow all modules, we shouldn't do security checks
# either since otherwise users can't really import anything
# because that will likely involve opening files on disk, which
# is disallowed by security checks
if self.allow_all_modules:
self.disable_security_checks = True
# if True, then displays ALL stack frames that have ever existed
# rather than only those currently on the stack (and their
# lexical parents)
self.cumulative_mode = cumulative_mode
# if True, then render certain primitive objects as heap objects
self.render_heap_primitives = heap_primitives
# if True, then don't render any data structures in the trace,
# and show only outputs
self.show_only_outputs = show_only_outputs
# a function that takes the output trace as a parameter and
# processes it
self.finalizer_func = finalizer_func
# each entry contains a dict with the information for a single
# executed line
self.trace = []
# if this is true, don't put any more stuff into self.trace
self.done = False
# if this is non-null, don't do any more tracing until a
# 'return' instruction with a stack gotten from
# get_stack_code_IDs() that matches wait_for_return_stack
self.wait_for_return_stack = None
# http://stackoverflow.com/questions/2112396/in-python-in-google-app-engine-how-do-you-capture-output-produced-by-the-print
self.GAE_STDOUT = sys.stdout
# Key: function object
# Value: parent frame
self.closures = {}
# Key: code object for a lambda
# Value: parent frame
self.lambda_closures = {}
# set of function objects that were defined in the global scope
self.globally_defined_funcs = set()
# Key: frame object
# Value: monotonically increasing small ID, based on call order
self.frame_ordered_ids = {}
self.cur_frame_id = 1
# List of frames to KEEP AROUND after the function exits.
# If cumulative_mode is True, then keep ALL frames in
# zombie_frames; otherwise keep only frames where
# nested functions were defined within them.
self.zombie_frames = []
# set of elements within zombie_frames that are also
# LEXICAL PARENTS of other frames
self.parent_frames_set = set()
# all globals that ever appeared in the program, in the order in
# which they appeared. note that this might be a superset of all
# the globals that exist at any particular execution point,
# since globals might have been deleted (using, say, 'del')
self.all_globals_in_order = []
# very important for this single object to persist throughout
# execution, or else canonical small IDs won't be consistent.
self.encoder = ObjectEncoder(self)
self.executed_script = None # Python script to be executed!
# if there is at least one line that ends with BREAKPOINT_STR,
# then activate "breakpoint mode", where execution should stop
# ONLY at breakpoint lines.
self.breakpoints = []
self.vars_to_hide = set() # a set of regex match objects
# created by compileGlobMatch() from
# the contents of PYTUTOR_HIDE_STR
self.types_to_inline = (
set()
) # a set of regex match objects derived from PYTUTOR_INLINE_TYPE_STR
self.prev_lineno = -1 # keep track of previous line just executed
def should_hide_var(self, var):
for re_match in self.vars_to_hide:
if re_match(var):
return True
return False
def get_user_stdout(self):
def encode_stringio(sio):
return sio.getvalue()
if self.separate_stdout_by_module:
ret = {}
for module_name in self.stdout_by_module:
ret[module_name] = encode_stringio(self.stdout_by_module[module_name])
return ret
else:
# common case - single stdout stream
return encode_stringio(self.user_stdout)
def get_frame_id(self, cur_frame):
return self.frame_ordered_ids[cur_frame]
# Returns the (lexical) parent of a function value.
def get_parent_of_function(self, val):
if val in self.closures:
return self.get_frame_id(self.closures[val])
elif val in self.lambda_closures:
return self.get_frame_id(self.lambda_closures[val])
else:
return None
# Returns the (lexical) parent frame of the function that was called
# to create the stack frame 'frame'.
#
# OKAY, this is a SUPER hack, but I don't see a way around it
# since it's impossible to tell exactly which function
# ('closure') object was called to create 'frame'.
#
# The Python interpreter doesn't maintain this information,
# so unless we hack the interpreter, we will simply have
# to make an educated guess based on the contents of local
# variables inherited from possible parent frame candidates.
def get_parent_frame(self, frame):
# print >> sys.stderr, 'get_parent_frame: frame.f_code', frame.f_code
for func_obj, parent_frame in self.closures.items():
# ok, there's a possible match, but let's compare the
# local variables in parent_frame to those of frame
# to make sure. this is a hack that happens to work because in
# Python, each stack frame inherits ('inlines') a copy of the
# variables from its (lexical) parent frame.
if func_obj.__code__ == frame.f_code:
all_matched = True