#!/usr/bin/env bash
# Assign GPUS on its own line: in "GPUS=$1 nohup ...", the shell expands $GPUS
# to its old (likely empty) value before the prefix assignment takes effect,
# so --nproc_per_node would be empty. Quote "${@:2}" to preserve args with spaces.
GPUS=$1
nohup python3 -m torch.distributed.run --nproc_per_node="$GPUS" fed_run.py "${@:2}" > train.log 2>&1 & disown
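
# Usage sketch (assumptions: this file is saved as train.sh; the --rounds flag
# below is hypothetical — pass whatever options fed_run.py actually accepts
# after the GPU count):
#   bash train.sh 4 --rounds 100
# The run is detached and survives the terminal closing; follow progress with:
#   tail -f train.log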