distributed/all_reduce.py
import torch
import torch.distributed as dist

def init_process():
    # Initialize the default process group with the NCCL backend;
    # torchrun supplies the rank and world-size environment variables.
    dist.init_process_group(backend="nccl")
    # Bind each process to its own GPU, indexed by rank.
    torch.cuda.set_device(dist.get_rank())

def example_reduce():
    # Each rank builds a tensor filled with its own rank id.
    tensor = torch.tensor([dist.get_rank()] * 4, dtype=torch.float32).cuda()
    print(f"Before reduce on rank {dist.get_rank()}: {tensor}")
    # all_reduce sums the tensors element-wise across all ranks in place,
    # so every rank ends up with the same result.
    dist.all_reduce(tensor, op=dist.ReduceOp.SUM)
    print(f"After reduce on rank {dist.get_rank()}: {tensor}")

if __name__ == "__main__":
    init_process()
    example_reduce()
    dist.destroy_process_group()
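
# Usage: the script expects one process per GPU, which torchrun sets up.
# A minimal launch sketch, assuming a single node with 2 GPUs (the GPU
# count here is illustrative):
#
#   torchrun --nproc_per_node=2 all_reduce.py
#
# Expected output, modulo interleaving between ranks: each rank starts
# with a tensor filled with its own rank id, and after the SUM
# all-reduce every rank holds the element-wise sum 0 + 1 = 1:
#
#   Before reduce on rank 0: tensor([0., 0., 0., 0.], device='cuda:0')
#   Before reduce on rank 1: tensor([1., 1., 1., 1.], device='cuda:1')
#   After reduce on rank 0: tensor([1., 1., 1., 1.], device='cuda:0')
#   After reduce on rank 1: tensor([1., 1., 1., 1.], device='cuda:1')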