From 7e72a96e2e1930709f9046de70c70eaabbc56040 Mon Sep 17 00:00:00 2001 From: Yeshwanth N Date: Tue, 26 Nov 2024 16:42:27 +0530 Subject: [PATCH] [BugFix]: KeyError: 'Adafactor is already registered in optimizer at torch.optim' --- mmengine/optim/optimizer/builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmengine/optim/optimizer/builder.py b/mmengine/optim/optimizer/builder.py index 7b4090ba7a..d47d5e4e5a 100644 --- a/mmengine/optim/optimizer/builder.py +++ b/mmengine/optim/optimizer/builder.py @@ -170,7 +170,7 @@ def register_transformers_optimizers(): except ImportError: pass else: - OPTIMIZERS.register_module(name='Adafactor', module=Adafactor) + OPTIMIZERS.register_module(name='Adafactor', module=Adafactor, force=True) # The same optimizer was later introduced in PyTorch (torch.optim), but transformers had it first; force-register the transformers version to preserve backward compatibility. transformer_optimizers.append('Adafactor')
 return transformer_optimizers