import torch
import torch.distributed as dist
def init_process(backend: str = "nccl") -> None:
    """Initialize the default distributed process group and select this rank's GPU.

    Rank, world size, and rendezvous info are read from the environment,
    as set by a launcher such as ``torchrun``.

    Args:
        backend: Collective backend to use. Defaults to ``"nccl"`` (GPU);
            pass ``"gloo"`` to run the same code on CPU.
    """
    dist.init_process_group(backend=backend)
    # NOTE(review): binding device index to the *global* rank assumes a
    # single-node launch with one process per GPU — for multi-node jobs
    # the LOCAL_RANK environment variable should be used instead; confirm
    # against the intended launch configuration.
    torch.cuda.set_device(dist.get_rank())
def example_reduce():
    """Demonstrate ``dist.reduce``: sum every rank's tensor onto rank 0.

    Each rank contributes a length-4 float32 tensor filled with its own
    rank id; after the reduce, rank 0 holds the element-wise sum.
    """
    rank = dist.get_rank()
    tensor = torch.full((4,), float(rank), dtype=torch.float32, device="cuda")
    print(f"Before reduce on rank {dist.get_rank()}: {tensor}")
    # SUM-reduce across all ranks; only dst=0 is guaranteed to hold the result.
    dist.reduce(tensor, dst=0, op=dist.ReduceOp.SUM)
    print(f"After reduce on rank {dist.get_rank()}: {tensor}")
# Entry point: launch with e.g. `torchrun --nproc_per_node=2 this_script.py`.
# The guard keeps the distributed setup from running on mere import.
if __name__ == "__main__":
    init_process()
    example_reduce()
    # Tear down the process group so every rank exits cleanly.
    dist.destroy_process_group()