import os
import sys
import tempfile
import torch
import torch.distributed as dist
import torch.nn as nn
import torch.optim as optim
import torch.multiprocessing as mp
from torch.nn.parallel import DistributedDataParallel as DDP
# On the Windows platform, the torch.distributed package only
# supports the Gloo backend, FileStore, and TCPStore.
# For FileStore, set the init_method parameter in init_process_group
# to a local file. Example as follows:
#   init_method="file:///f:/libtmp/some_file"
#   dist.init_process_group(
#       "gloo",
#       rank=rank,
#       init_method=init_method,
#       world_size=world_size)
# For TCPStore, use the same way as on Linux.
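
# A minimal sketch of TCP-based initialization, which works the same way on
# Linux and Windows. The MASTER_ADDR/MASTER_PORT values and the setup/cleanup
# function names below are illustrative assumptions, not part of the comment
# above; init_process_group defaults to reading these environment variables.
def setup(rank, world_size):
    os.environ["MASTER_ADDR"] = "localhost"
    os.environ["MASTER_PORT"] = "12355"
    # Initialize the process group over TCP using the env vars set above.
    dist.init_process_group("gloo", rank=rank, world_size=world_size)

def cleanup():
    # Tear down the process group once training is finished.
    dist.destroy_process_group()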