futranbg committed on
Commit 6bf3d15 · 1 Parent(s): 119eed3

Update app.py

Files changed (1)
  1. app.py +6 -7
app.py CHANGED
@@ -13,6 +13,7 @@ End of the document.[/INST]
 {target} translated document:
 
 """
+
 starchat_template = """<|system|>I want you to act as document language translator. You do translation {source} texts in document into then you return to me the translated document AND DO NOTHING ELSE.<</SYS>>
 Begin of the document:
 {query}
@@ -22,12 +23,10 @@ End of the document<|end|>
 
 """
 
-starchat_template = """Translation {source} texts into {target}. then you return to me the translated document AND DO NOTHING ELSE.<</SYS>>
-Begin of the texts:
+bloom_template = """Translate bellow {source} text into {target}:
 {query}
-End of the texts
-
-{target} translated texts:
+======
+{target} translated text:
 
 """
 model_kwargs={
@@ -43,7 +42,7 @@ bloom_model_kwargs={
     "temperature": 0.01,
     "truncate": 1000,
     "seed" : 256,
-    "stop" : ["</s>","<|endoftext|>","<|end|>"],
+    "stop" : ["</s>","<|endoftext|>","<|end|>","======"],
 }
 
 llm1 = HuggingFaceHub(repo_id=llama_repo, task="text-generation", model_kwargs=model_kwargs)
@@ -53,7 +52,7 @@ llm3 = HuggingFaceHub(repo_id=bloom_repo, task="text-generation", model_kwargs=b
 def translation(source, target, text):
     response = text
     try:
-        input_prompt = llamma_template.replace("{source}", source)
+        input_prompt = bloom_template.replace("{source}", source)
         input_prompt = input_prompt.replace("{target}", target)
         input_prompt = input_prompt.replace("{query}", text)
         response=llm3(input_prompt)
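
For context, the Python sketch below consolidates the BLOOM translation path as it stands after this commit: the new bloom_template replaces the second (misnamed) starchat_template, "======" is appended to the stop sequences, and translation() now fills bloom_template instead of llamma_template. It is a minimal sketch reconstructed from the hunks above, not the full app.py; the import, the value of bloom_repo, any other bloom_model_kwargs entries, and the except/return parts of translation() are not visible in this diff, so those pieces (LangChain's HuggingFaceHub wrapper, the bigscience/bloom repo id, a bare fallback to the untranslated text) are assumptions.

# Minimal sketch (not the full app.py) of the BLOOM translation path after this commit.
from langchain.llms import HuggingFaceHub  # assumed import; the diff only shows the calls

bloom_repo = "bigscience/bloom"  # assumed repo id; defined elsewhere in app.py

# New prompt template from this commit. The "======" delimiter marks where the
# answer ends, which is why it is also added as a stop sequence below.
bloom_template = """Translate bellow {source} text into {target}:
{query}
======
{target} translated text:

"""

bloom_model_kwargs = {
    "temperature": 0.01,
    "truncate": 1000,
    "seed": 256,
    # "======" was added in this commit so generation stops at the delimiter.
    "stop": ["</s>", "<|endoftext|>", "<|end|>", "======"],
}

# Requires HUGGINGFACEHUB_API_TOKEN to be set in the environment.
llm3 = HuggingFaceHub(repo_id=bloom_repo, task="text-generation", model_kwargs=bloom_model_kwargs)

def translation(source, target, text):
    response = text  # fall back to the untranslated text if the call fails (assumed)
    try:
        # Switched from llamma_template to bloom_template in this commit.
        input_prompt = bloom_template.replace("{source}", source)
        input_prompt = input_prompt.replace("{target}", target)
        input_prompt = input_prompt.replace("{query}", text)
        response = llm3(input_prompt)
    except Exception:
        pass  # error handling is not visible in this hunk
    return response  # assumed; the return is outside the diff context

With the template and the stop list aligned on "======", a call such as translation("English", "Vietnamese", "Hello!") should return only the text generated before the first stop token, instead of the model continuing past the translated passage.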