Update README.md
change chat template!!!
README.md
CHANGED
@@ -92,11 +92,20 @@ source_text = "Hello, how are you?"
 source_lang = "en"
 target_lang = "ko" # or "uz" for Uzbek
 
-
-
+messages = [
+    {"role": "system", "content": f"""Translate {source_lang} to {target_lang} word by word correctly."""},
+    {"role": "user", "content": f"""{source_text}"""},
+]
+# Apply chat template
+input_ids = tokenizer.apply_chat_template(
+    messages,
+    add_generation_prompt=True,
+    return_tensors="pt"
+).to('cuda')
 
 outputs = model.generate(input_ids, max_length=100)
-
+response = outputs[0][input_ids.shape[-1]:]
+translated_text = tokenizer.decode(response, skip_special_tokens=True)
 print(translated_text)
 ```
 ## Performance
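For reference, here is a minimal, self-contained sketch of how the README example reads after this change. The imports, model id, and loading code are assumptions filled in for illustration (the hunk only shows part of the snippet, and the tokenizer is assumed to ship a chat template):

```python
# Hypothetical end-to-end version of the updated README snippet.
# The model id below is a placeholder, not taken from this diff.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "your-org/your-translation-model"  # placeholder
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16).to("cuda")

source_text = "Hello, how are you?"
source_lang = "en"
target_lang = "ko"  # or "uz" for Uzbek

messages = [
    {"role": "system", "content": f"Translate {source_lang} to {target_lang} word by word correctly."},
    {"role": "user", "content": source_text},
]

# Apply the tokenizer's chat template to build the prompt tensor
input_ids = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    return_tensors="pt",
).to("cuda")

outputs = model.generate(input_ids, max_length=100)

# Decode only the newly generated tokens, skipping the prompt
response = outputs[0][input_ids.shape[-1]:]
translated_text = tokenizer.decode(response, skip_special_tokens=True)
print(translated_text)
```

Slicing `outputs[0]` at `input_ids.shape[-1]` keeps the echoed prompt out of the decoded result; without it, `translated_text` would also contain the chat-formatted input.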