Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 18 additions & 14 deletions wan/utils/multitalk_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,21 +49,25 @@ def torch_gc():
def split_token_counts_and_frame_ids(T, token_frame, world_size, rank):
    """Split ``T`` frames' worth of tokens across ranks and report which
    frames (and how many tokens per frame) fall into the given rank's slice.

    The global token sequence has ``S = T * token_frame`` tokens laid out
    frame-major: tokens ``0 .. token_frame-1`` belong to frame 0, the next
    ``token_frame`` tokens to frame 1, and so on. The sequence is split into
    ``world_size`` contiguous chunks whose sizes differ by at most one token;
    the first ``S % world_size`` ranks each receive one extra token.

    Args:
        T: number of frames.
        token_frame: number of tokens per frame.
        world_size: number of ranks the sequence is split across.
        rank: 0-based rank whose slice is queried.

    Returns:
        ``(counts, frame_ids)`` — two equal-length Python lists.
        ``frame_ids`` holds, in ascending order, the frame indices that have
        at least one token inside this rank's slice, and ``counts[i]`` is how
        many of this rank's tokens belong to ``frame_ids[i]``. An empty slice
        (possible when ``world_size > S``) yields two empty lists.
    """
    S = T * token_frame

    # Even split with the remainder spread over the leading ranks:
    # each rank gets `base` tokens, the first `rem` ranks get one extra.
    base = S // world_size
    rem = S % world_size
    split_sizes = torch.full((world_size,), base, dtype=torch.long)
    split_sizes[:rem] += 1

    # Half-open [start, end) range of global token indices owned by `rank`.
    start = split_sizes[:rank].sum()
    end = start + split_sizes[rank]

    # Vectorized mapping: global token index -> frame id (frame-major layout).
    idx = torch.arange(start, end, dtype=torch.long)
    frame_ids = idx // token_frame

    # torch.unique returns the sorted distinct frame ids together with their
    # per-frame token counts, matching the old loop-and-filter behavior.
    unique_frames, counts = torch.unique(frame_ids, return_counts=True)
    return counts.tolist(), unique_frames.tolist()


def normalize_and_scale(column, source_range, target_range, epsilon=1e-8):
Expand Down