
Commit 129adb3

Format
Signed-off-by: Fynn Schmitt-Ulms <[email protected]>
1 parent 38fc772 commit 129adb3

File tree

  • src/speculators/train/eagle3
      • core.py

1 file changed, +6 -4 lines changed


src/speculators/train/eagle3/core.py

Lines changed: 6 additions & 4 deletions
@@ -156,7 +156,9 @@ def _setup_embeddings_and_lm_heads(self, config: VerifierConfig, t2d: torch.Tens
         # shape: [verifier_vocab_size, hidden_size]
         default_dtype = self.embed_tokens.weight.dtype
 
-        embed_tokens_sd = {"weight": verifier_weights["model.embed_tokens.weight"].to(default_dtype)}
+        embed_tokens_sd = {
+            "weight": verifier_weights["model.embed_tokens.weight"].to(default_dtype)
+        }
         self.embed_tokens.load_state_dict(embed_tokens_sd)
         self.embed_tokens.weight.requires_grad = False
 
@@ -169,9 +171,9 @@ def _setup_embeddings_and_lm_heads(self, config: VerifierConfig, t2d: torch.Tens
             self.hidden_size, self.draft_vocab_size, bias=False
         )
 
-        masked_lm_head_weight = verifier_weights["lm_head.weight"].to(device=t2d.device, dtype=default_dtype)[
-            t2d.to(torch.bool), :
-        ]
+        masked_lm_head_weight = verifier_weights["lm_head.weight"].to(
+            device=t2d.device, dtype=default_dtype
+        )[t2d.to(torch.bool), :]
         if masked_lm_head_weight.shape != self.lm_head.weight.shape:
             raise ValueError(
                 f"Masked verifier lm head data shape "
