add eval mode in sample
README.md CHANGED
@@ -60,6 +60,7 @@ tok_seq = torch.LongTensor(tok_seq).unsqueeze(0).to(device) # unsqueeze for bat
 
 # prep model and forward
 model.to(device)
+model.eval() # deterministic
 
 with torch.inference_mode():
     embeddings = model(tok_seq)
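For context, a minimal self-contained sketch of what the added `model.eval()` buys. The encoder below is a stand-in (`nn.Embedding` followed by `nn.Dropout`) and the token ids are placeholders, not the repository's actual model or tokenizer: with dropout still in training mode, repeated forward passes return different embeddings, while after `model.eval()` the output is deterministic.

import torch
import torch.nn as nn

device = "cuda" if torch.cuda.is_available() else "cpu"

# stand-in encoder for illustration only: embedding layer followed by dropout
model = nn.Sequential(nn.Embedding(1000, 64), nn.Dropout(p=0.1))

tok_seq = [5, 42, 7, 99]                                     # placeholder token ids
tok_seq = torch.LongTensor(tok_seq).unsqueeze(0).to(device)  # unsqueeze for batch dim

# prep model and forward
model.to(device)
model.eval()  # deterministic: puts dropout (and batchnorm, if any) into inference behaviour

with torch.inference_mode():
    first = model(tok_seq)
    second = model(tok_seq)

print(torch.equal(first, second))  # True once eval() has disabled dropout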