From c7bab26f666b2f7a87dd5718154ad3733b66fbde Mon Sep 17 00:00:00 2001
From: Erick Matsen
Date: Tue, 9 Jul 2024 16:45:39 -0700
Subject: [PATCH] -

---
 netam/framework.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/netam/framework.py b/netam/framework.py
index a9fdce6a..b2261d06 100644
--- a/netam/framework.py
+++ b/netam/framework.py
@@ -421,7 +421,7 @@ def reset_optimization(self, learning_rate=None):
         if learning_rate is None:
             learning_rate = self.learning_rate
 
-        # copied from # https://github.com/karpathy/nanoGPT/blob/9755682b981a45507f6eb9b11eadef8cb83cebd5/model.py#L264
+        # copied from https://github.com/karpathy/nanoGPT/blob/9755682b981a45507f6eb9b11eadef8cb83cebd5/model.py#L264
         param_dict = {
             pn: p for pn, p in self.model.named_parameters() if p.requires_grad
         }
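
The hunk only shows the comment fix and the start of the trainable-parameter collection; the rest of `reset_optimization` is not included in this patch. For context, the sketch below illustrates the nanoGPT-style optimizer setup that the referenced line (`model.py#L264`) is part of: 2D-or-higher tensors receive weight decay, 1D tensors (biases, norm weights) do not. The function name `configure_optimizer`, the `weight_decay` default, and the `betas` values here are illustrative assumptions, not netam's actual settings.

```python
import torch

def configure_optimizer(model, learning_rate, weight_decay=0.1, betas=(0.9, 0.95)):
    # Collect only trainable parameters, as in the hunk above.
    param_dict = {pn: p for pn, p in model.named_parameters() if p.requires_grad}
    # nanoGPT convention: apply weight decay to tensors with dim >= 2
    # (matmul weights, embeddings) but not to 1D tensors (biases, norms).
    decay_params = [p for p in param_dict.values() if p.dim() >= 2]
    nodecay_params = [p for p in param_dict.values() if p.dim() < 2]
    optim_groups = [
        {"params": decay_params, "weight_decay": weight_decay},
        {"params": nodecay_params, "weight_decay": 0.0},
    ]
    return torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas)
```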