# Select the collective-communication backend for torch.distributed;
# NCCL is the standard backend for multi-GPU NVIDIA training.
self.dist_backend = "nccl"
if dist_init_required:
    # Caller asked us to set up torch.distributed. Only initialize if
    # nobody else (e.g. a launcher) has already done so.
    if not dist.is_initialized():
        logger.info("Initializing torch distributed with backend: {}".format(
            self.dist_backend))
        dist.init_process_group(backend=self.dist_backend)
else:
    # Caller claims distributed is already initialized -- verify that.
    # NOTE(review): `assert` is stripped under `python -O`; kept as an
    # assert for compatibility with callers expecting AssertionError.
    assert dist.is_initialized(), (
        "Torch distributed not initialized. Please set dist_init_required "
        "to True or initialize before calling deepspeed.initialize()")