# distributed/broadcast.py
import torch
import torch.distributed as dist

def init_process():
    # Join the default process group (NCCL backend for GPU collectives) and
    # pin this process to its own GPU. Assumes a single-node launch where the
    # global rank doubles as the local device index.
    dist.init_process_group(backend="nccl")
    torch.cuda.set_device(dist.get_rank())

def example_broadcast():
    # Rank 0 holds the payload; every other rank allocates a zero-filled
    # buffer of the same shape and dtype to receive it.
    if dist.get_rank() == 0:
        tensor = torch.tensor([1, 2, 3, 4], dtype=torch.float32).cuda()
    else:
        tensor = torch.zeros(4, dtype=torch.float32).cuda()
    print(f"Before broadcast on rank {dist.get_rank()}: {tensor}")
    # Copy rank 0's tensor into every rank's buffer (the op is in place).
    dist.broadcast(tensor, src=0)
    print(f"After broadcast on rank {dist.get_rank()}: {tensor}")

if __name__ == "__main__":
    init_process()
    example_broadcast()
    dist.destroy_process_group()
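
# A typical launch is one process per GPU via torchrun, for example:
#   torchrun --nproc_per_node=2 broadcast.py
# torchrun sets RANK, WORLD_SIZE, MASTER_ADDR, and MASTER_PORT, which the
# default env:// rendezvous used by init_process_group() reads.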