import os


def dist_env():
    """
    Return a dict of all variables that distributed training may use.
    NOTE: you may rewrite this function to suit your cluster environments.
    """
    trainer_id = int(os.getenv("PADDLE_TRAINER_ID", "0"))
    num_trainers = 1
    training_role = os.getenv("PADDLE_TRAINING_ROLE", "TRAINER")
    assert training_role in ("PSERVER", "TRAINER")

    # - PADDLE_TRAINER_ENDPOINTS means nccl2 mode.
    # - PADDLE_PSERVER_ENDPOINTS means pserver mode.
    # - PADDLE_CURRENT_ENDPOINT means current process endpoint.
    trainer_endpoints = os.getenv("PADDLE_TRAINER_ENDPOINTS")
    pserver_endpoints = os.getenv("PADDLE_PSERVER_ENDPOINTS")
    current_endpoint = os.getenv("PADDLE_CURRENT_ENDPOINT")

    if trainer_endpoints:
        # nccl2 mode: the trainer count is the number of trainer endpoints.
        trainer_endpoints = trainer_endpoints.split(",")
        num_trainers = len(trainer_endpoints)
    elif pserver_endpoints:
        # pserver mode: the trainer count is passed in explicitly.
        num_trainers = int(os.getenv("PADDLE_TRAINERS_NUM"))

    return {
        "trainer_id": trainer_id,
        "num_trainers": num_trainers,
        "current_endpoint": current_endpoint,
        "training_role": training_role,
        "pserver_endpoints": pserver_endpoints,
        "trainer_endpoints": trainer_endpoints,
    }
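
# A minimal usage sketch, assuming a hypothetical single-machine nccl2 run
# with two trainers; the endpoint addresses and port numbers below are
# illustrative placeholders, not values from the original code.
if __name__ == "__main__":
    os.environ["PADDLE_TRAINING_ROLE"] = "TRAINER"
    os.environ["PADDLE_TRAINER_ID"] = "0"
    os.environ["PADDLE_TRAINER_ENDPOINTS"] = "127.0.0.1:6170,127.0.0.1:6171"
    os.environ["PADDLE_CURRENT_ENDPOINT"] = "127.0.0.1:6170"

    env = dist_env()
    print(env["num_trainers"])   # 2, derived from PADDLE_TRAINER_ENDPOINTS
    print(env["trainer_id"])     # 0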