CineAI committed on
Commit
4082a36
1 Parent(s): 194c45c

Update llm/llm.py

Browse files
Files changed (1) hide show
  1. llm/llm.py +8 -3
llm/llm.py CHANGED
@@ -1,3 +1,9 @@
 
 
 
 
 
 
1
  import os
2
 
3
  import yaml
@@ -29,9 +35,8 @@ class LLM_chain:
29
  def __call__(self, entity: str, id: int = 0):
30
  try:
31
  data = self.__read_yaml()
32
- print(data)
33
- prompts = data["prompts"]
34
- template = prompts["prompt_template"][1]
35
  prompt = PromptTemplate(template=template, input_variables=["entity"])
36
  llm_chain = LLMChain(prompt=prompt, llm=self.llm, verbose=True)
37
  output = llm_chain.invoke(entity)
 
1
+ # {'prompts': [
2
+ # {'id': 1, 'prompt_template': 'Question: {question}\nAnswer: \n', 'description': 'simple question without a prompt', 'rate': 1},
3
+ # {'id': 2, 'prompt_template': "Question: {question}\nAnswer: Write a concise answer on the question with
4
+ # one example if it's possible. CONCISE ANSWER.\n", 'description': 'simple concise prompt', 'rate': 3}]}
5
+
6
+
7
  import os
8
 
9
  import yaml
 
35
  def __call__(self, entity: str, id: int = 0):
36
  try:
37
  data = self.__read_yaml()
38
+ prompts = data["prompts"][id] # get second prompt from yaml, need change id parameter to get other prompt
39
+ template = prompts["prompt_template"]
 
40
  prompt = PromptTemplate(template=template, input_variables=["entity"])
41
  llm_chain = LLMChain(prompt=prompt, llm=self.llm, verbose=True)
42
  output = llm_chain.invoke(entity)