From 731c40adf856d59316da749081e7f39ba91e7222 Mon Sep 17 00:00:00 2001
From: "Xiaojian \"JJ\" Deng"
Date: Fri, 1 Oct 2021 11:41:11 -0500
Subject: [PATCH] Update train.py

topk should have type float (not int) to match its metavar of FLOAT.
The option wasn't working before; after making that change, training
seems to progress more uniformly.
---
 train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/train.py b/train.py
index 617b4f3a..40099c36 100755
--- a/train.py
+++ b/train.py
@@ -615,7 +615,7 @@ def main():
     group.add_argument('--gamma', help='Override R1 gamma', type=float, metavar='FLOAT')
     group.add_argument('--nkimg', help='Override starting count', type=int, metavar='INT')
     group.add_argument('--kimg', help='Override training duration', type=int, metavar='INT')
-    group.add_argument('--topk', help='utilize top-k training', type=int, metavar='FLOAT')
+    group.add_argument('--topk', help='utilize top-k training', type=float, metavar='FLOAT')
 
     group = parser.add_argument_group('discriminator augmentation')
     group.add_argument('--aug', help='Augmentation mode (default: ada)', choices=['noaug', 'ada', 'fixed', 'adarv'])
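
For reference, a minimal standalone sketch (not part of the patch itself) of
why the old type=int broke the flag: argparse applies the type callable to the
raw string, and int('0.9') raises ValueError, so fractional top-k ratios were
rejected before this change.

    import argparse

    parser = argparse.ArgumentParser()
    # Before the patch, type=int made argparse call int('0.9'), which fails
    # with "invalid int value", so fractional top-k ratios could not be passed.
    # With type=float, as in this patch, the value parses correctly.
    parser.add_argument('--topk', help='utilize top-k training', type=float, metavar='FLOAT')

    args = parser.parse_args(['--topk', '0.9'])
    print(args.topk)  # 0.9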