Update README.md
README.md (changed)

@@ -32,7 +32,7 @@ input_sequence = "TCACCGTTCTACAATCCCAAGCTGGAGTCAAGCTCAACAGGGTCTTC"
 # Tokenize the input sequence
 input_tokens = tokenizer.encode(input_sequence, return_tensors="pt", add_special_tokens=False)
 
-# Generate output from the model
+# Generate output from the model
 generated_tokens = model.generate(input_tokens, max_length=32)
 
 # Decode the generated output and clean up the result
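For context, here is a minimal self-contained sketch of the README snippet this hunk touches, assuming a Hugging Face `transformers` causal LM. The checkpoint name and the final decode line are placeholders/assumptions, since the diff only shows the tokenize and generate lines and their comments:

```python
# Minimal sketch assuming a Hugging Face transformers causal LM.
# The checkpoint name is a placeholder and the decode step is assumed;
# only the tokenize/generate lines appear in this diff.
from transformers import AutoTokenizer, AutoModelForCausalLM

checkpoint = "your-org/your-dna-lm"  # placeholder, not taken from the diff
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint)

input_sequence = "TCACCGTTCTACAATCCCAAGCTGGAGTCAAGCTCAACAGGGTCTTC"

# Tokenize the input sequence
input_tokens = tokenizer.encode(input_sequence, return_tensors="pt", add_special_tokens=False)

# Generate output from the model
generated_tokens = model.generate(input_tokens, max_length=32)

# Decode the generated output and clean up the result (assumed decode step)
generated_sequence = tokenizer.decode(generated_tokens[0], skip_special_tokens=True)
print(generated_sequence)
```

Note that `max_length=32` bounds the total sequence length including the prompt tokens; `max_new_tokens` is the option to use if only the number of newly generated tokens should be limited.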