
Commit

Enable tests (#104)
feifei-111 authored Feb 26, 2024
1 parent 6d6ae08 commit 6997a36
Showing 5 changed files with 15 additions and 12 deletions.
11 changes: 7 additions & 4 deletions padiff/abstracts/marker.py
@@ -62,7 +62,7 @@ def update_unassigned_weights_list(self, layers, mode="all"):

     def set_layer_map(self, layer_map):
         _layer_map = []
-        for layer in self.traversal_for_assign_weight():
+        for layer in self.traversal_for_layer_map():
             if layer.model in layer_map:
                 self.unassigned_weights_list_recursively.add(layer.model)
                 _layer_map.append(layer)
@@ -79,7 +79,7 @@ def auto_layer_map(self, model_place):
         registered = init_pool.registered_base_models if model_place == "base" else init_pool.registered_raw_models

         log("Auto set layer_map start searching...")
-        for layer in self.traversal_for_auto_layer_map():
+        for layer in self.traversal_for_layer_map():
             if layer.fullname in registered:
                 print(f"++++ {model_place}_model found `{layer.fullname}` add to layer_map ++++")
                 _layer_map.append(layer)
@@ -105,11 +105,14 @@ def traversal_for_hook(self):
     def traversal_for_assign_weight(self):
         yield self.proxy_model
         for model in traversal_for_assign_weight(self.proxy_model, self):
-            if len(list(model.parameters(recursively=False))) == 0:
+            if (
+                model.model not in self.unassigned_weights_list_recursively
+                and len(list(model.parameters(recursively=False))) == 0
+            ):
                 continue
             yield model

-    def traversal_for_auto_layer_map(self):
+    def traversal_for_layer_map(self):
         yield self.proxy_model
         for model in traversal_for_assign_weight(self.proxy_model, self):
             yield model
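
Note: both set_layer_map and auto_layer_map now walk the same traversal_for_layer_map generator, while traversal_for_assign_weight additionally skips sub-models that hold no parameters of their own unless they were registered in unassigned_weights_list_recursively. A minimal sketch of that skip condition, using a simplified stand-in marker with a children() iterator (not the actual padiff proxy-model API):

    def traversal_for_assign_weight(root, unassigned_recursively):
        yield root
        for model in root.children():
            # Parameter-free sub-models are skipped unless they were explicitly
            # registered for recursive weight assignment.
            if (
                model not in unassigned_recursively
                and len(list(model.parameters(recursively=False))) == 0
            ):
                continue
            yield model
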
8 changes: 4 additions & 4 deletions padiff/report/hooks.py
@@ -22,6 +22,7 @@
     for_each_grad_tensor,
     extract_frame_summary,
 )
+from paddle.utils import to_sequence
 import json
 import numpy
 import paddle
@@ -133,8 +134,7 @@ def info_hook(model, input, output, net_id):
         # two report_item with same id, the step_idx should be corresponded
         step_idx = len(list(filter(lambda x: x.type == "forward" and x.net_id == net_id, report.items))) - 1
         base_report_node = find_base_report_node(net_id, step_idx)
-
-        retval = map_structure_and_replace_key(replace_forward_output(base_report_node), output, output)
+        retval = map_structure_and_replace_key(replace_forward_output(base_report_node), to_sequence(output), output)
         __in_info_hook__ = False
         return retval
     else:
@@ -158,7 +158,7 @@ def tensor_hook(x_grad, bwd_item, nth_tensor, net_id):
         )
         base_report_node = find_base_report_node(net_id, step_idx)

-        value = numpy.load(base_report_node["bwd_grads"][nth_tensor])
+        value = numpy.load(base_report_node["bwd_grads"][nth_tensor]["path"])
         if isinstance(x_grad, paddle.Tensor):
             return paddle.to_tensor(value)
         else:
@@ -259,7 +259,7 @@ def inner(input_):
             raise RuntimeError(
                 "In single step mode, try to replace tensor by dumpped numpy value, but the number of tensors and numpy is not equal. Maybe the models are not corresponded."
             )
-        value = numpy.load(numpy_file_list[cur_idx])
+        value = numpy.load(numpy_file_list[cur_idx]["path"])
         if isinstance(input_, paddle.Tensor):
             return paddle.to_tensor(value)
         else:
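
Note: the hooks now read every dumped tensor through its "path" entry (each record presumably being a dict like {"path": "<npy file>", ...} rather than a bare file path), and the forward output is passed through paddle.utils.to_sequence, which presumably normalizes a single tensor into a sequence before map_structure_and_replace_key walks it. A hedged sketch of the shared load-and-replace step under that dict assumption:

    import numpy
    import paddle

    def load_dumped_tensor(record, like):
        # `record` is assumed to be a dict with at least a "path" key pointing
        # at a saved .npy file; only that key is visible in this diff.
        value = numpy.load(record["path"])
        if isinstance(like, paddle.Tensor):
            return paddle.to_tensor(value)  # keep the framework tensor type
        return value
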
2 changes: 1 addition & 1 deletion padiff/utils.py
@@ -21,7 +21,7 @@
 import torch


-from paddle.utils import flatten, pack_sequence_as, map_structure
+from paddle.utils import flatten, map_structure, pack_sequence_as


 """
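
Note: the utils.py change only reorders the paddle.utils imports. For reference, a small illustration of the assumed behavior of these nest-style helpers (flatten walks a nested structure into a flat list, map_structure applies a function while preserving the nesting, pack_sequence_as rebuilds the original structure from a flat list):

    from paddle.utils import flatten, map_structure, pack_sequence_as

    nested = ([1, 2], (3,))
    flat = flatten(nested)                            # [1, 2, 3]
    doubled = map_structure(lambda x: x * 2, nested)  # ([2, 4], (6,))
    rebuilt = pack_sequence_as(nested, flat)          # ([1, 2], (3,))
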
2 changes: 2 additions & 0 deletions tests/padiff_unittests.py
@@ -13,6 +13,7 @@
 # limitations under the License.

 import os
+import sys
 import subprocess

@@ -25,6 +26,7 @@
         err_info = f"*** ===================== {fpath} ========================= ***\n"
         err_info += f"{output}\n"
         print(f"Failed on unittest {fname} with error message \n {err_info}.", end="\n", flush=True)
+        sys.exit(1)
     else:
         print(f"Succeed on unittest {fname}.", end="\n", flush=True)
         os.system("rm -rf ./tests/padiff_dump ./tests/padiff_log")
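
Note: the added sys.exit(1) makes the unittest runner return a non-zero exit code on the first failing test, so CI marks the run as failed instead of silently continuing. A hedged sketch of the surrounding loop; the test discovery and subprocess invocation details are assumptions, only the exit-on-failure behavior mirrors this diff:

    import os
    import subprocess
    import sys

    for fname in sorted(os.listdir("./tests")):
        if not (fname.startswith("test_") and fname.endswith(".py")):
            continue
        fpath = os.path.join("./tests", fname)
        proc = subprocess.run([sys.executable, fpath], capture_output=True, text=True)
        if proc.returncode != 0:
            print(f"Failed on unittest {fname}:\n{proc.stdout}\n{proc.stderr}", flush=True)
            sys.exit(1)  # propagate the failure so the CI job goes red
        print(f"Succeed on unittest {fname}.", flush=True)
        os.system("rm -rf ./tests/padiff_dump ./tests/padiff_log")
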
4 changes: 1 addition & 3 deletions tests/test_api_to_Layer.py
@@ -61,9 +61,7 @@ def test_api_to_Layer(self):
         inp = paddle.rand((100, 100), dtype="float32")

         layer(inp)
-        layer.report
-
-        assert len(layer.report.items) == 12
+        assert len(layer.report.items) == 10


 if __name__ == "__main__":
