CineAI committed
Commit 2d03ac2
Parent: a07fc96

Update llm/llamacpp/lc_model.py

Files changed (1):
  1. llm/llamacpp/lc_model.py (+12 -6)
llm/llamacpp/lc_model.py CHANGED
@@ -40,6 +40,7 @@ class LC_TinyLlama(LLMInterface, ABC):
             logger.info("Model file successfully recorded")
             f.close()
         except OSError as e:
+            print(f"Error while write a file to directory : {e}")
             logger.error(msg="Error while write a file to directory", exc_info=e)
 
     @staticmethod
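
Review note: every hunk in this commit pairs a logger call with a print() of the same message, presumably so errors also show up on the console. An alternative with the standard library is to attach a stdout handler to the logger once, so a single call reaches both destinations. A minimal sketch, assuming a module-level logger (the name "lc_model" is an assumption, not from the diff):

    import logging
    import sys

    # Attach a console handler once; after this, logger.error(...) also
    # writes to stdout, so the duplicated print() calls become unnecessary.
    logger = logging.getLogger("lc_model")  # assumed logger name
    logger.setLevel(logging.DEBUG)
    console = logging.StreamHandler(sys.stdout)
    console.setFormatter(logging.Formatter("%(levelname)s | %(message)s"))
    logger.addHandler(console)

    try:
        raise OSError("disk full")  # stand-in for the real write failure
    except OSError as e:
        logger.error(msg="Error while write a file to directory", exc_info=e)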
@@ -50,13 +51,14 @@ class LC_TinyLlama(LLMInterface, ABC):
             data = yaml.safe_load(file)
             return data
         except Exception as e:
+            print(f"Execution filed : {e}")
             logger.error(msg="Execution filed", exc_info=e)
 
     def execution(self):
         try:
             data = self.__read_yaml()
             prompts = data["prompts"][
-                self.prompt_id]  # get second prompt from yaml, need change id parameter to get other prompt
+                self.prompt_id]  # to get second prompt from yaml, need change id parameter to get other prompt
             template = prompts["prompt_template"]
             prompt = PromptTemplate(template=template, input_variables=["entity"])
 
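Review note: execution() indexes data["prompts"][self.prompt_id] and reads prompt_template from the result, so the YAML is presumably a prompts list of mappings. A minimal sketch of that shape, with invented prompt text (the real file is not shown in this diff):

    import yaml
    from langchain.prompts import PromptTemplate

    # Hypothetical YAML matching data["prompts"][prompt_id]["prompt_template"];
    # the actual prompt wording is an assumption.
    raw = """
    prompts:
      - prompt_template: "Give a one-line description of {entity}."
      - prompt_template: "Write a short plot summary about {entity}."
    """

    data = yaml.safe_load(raw)
    prompt_id = 1  # the inline comment says changing this id selects another prompt
    template = data["prompts"][prompt_id]["prompt_template"]
    prompt = PromptTemplate(template=template, input_variables=["entity"])
    print(prompt.format(entity="Blade Runner"))

One thing worth checking in the full file: the template declares entity as its input variable, while the chain below is invoked with {"question": self.prompt_entity}; if those names really differ, LangChain would normally raise a missing-variable error.
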
@@ -76,13 +78,14 @@ class LC_TinyLlama(LLMInterface, ABC):
             output = llm_chain.invoke({"question": self.prompt_entity})
             return output
         except Exception as e:
+            print(f"Execution filed : {e}")
             logger.critical(msg="Execution filed", exc_info=e)
 
     def clear_llm(self, unused_model_dict, current_lc):
         # If unused_model_dict is not empty
-        if len(unused_model_dict) > 1 and unused_model_dict is not None:
+        if len(unused_model_dict) > 1 or unused_model_dict is not None:
             # go through key and value
-            for key, value in zip(unused_model_dict.keys(), unused_model_dict.values()):
+            for key, value in unused_model_dict.items():
                 # check if path is existing and key is not current using model
                 if os.path.exists(value) and key != current_lc:
                     # delete files from models directory except of current_lc
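
Review note: the guard change here flips and to or, but both variants are fragile. The old len(unused_model_dict) > 1 and unused_model_dict is not None still raises TypeError on None, because len() runs before the None check; the new len(unused_model_dict) > 1 or unused_model_dict is not None has the same ordering problem and is additionally true for every non-None dict, including an empty one. Both also skip dictionaries with exactly one entry. A minimal sketch of a guard with the checks in a safe order (illustrative only, not the committed code):

    import logging
    import os

    logger = logging.getLogger(__name__)

    def clear_llm(unused_model_dict, current_lc):
        # Falsiness covers both None and {} before len()/iteration is attempted.
        if not unused_model_dict:
            logger.info("Dictionary empty or None")
            return
        for key, value in unused_model_dict.items():
            # delete every recorded model file except the one in use
            if os.path.exists(value) and key != current_lc:
                os.remove(value)
                logger.info(f"Successfully deleted file {value}")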
@@ -126,6 +129,7 @@ class LC_Phi3(LLMInterface, ABC):
             logger.info("Model file successfully recorded")
             f.close()
         except OSError as e:
+            print(f"Error while write a file to directory : {e}")
             logger.error(msg="Error while write a file to directory", exc_info=e)
 
     @staticmethod
@@ -136,6 +140,7 @@ class LC_Phi3(LLMInterface, ABC):
             data = yaml.safe_load(file)
             return data
         except Exception as e:
+            print(f"Execution filed : {e}")
             logger.error(msg="Execution filed", exc_info=e)
 
     def execution(self):
@@ -162,20 +167,21 @@ class LC_Phi3(LLMInterface, ABC):
             output = llm_chain.invoke({"question": self.prompt_entity})
             return output
         except Exception as e:
+            print(f"Execution filed : {e}")
             logger.critical(msg="Execution filed", exc_info=e)
 
     def clear_llm(self, unused_model_dict, current_lc):
         # If unused_model_dict is not empty
-        if len(unused_model_dict) > 1:
+        if len(unused_model_dict) > 1 or unused_model_dict is not None:
             # go through key and value
-            for key, value in zip(unused_model_dict.keys(), unused_model_dict.values()):
+            for key, value in unused_model_dict.items():
                 # check if path is existing and key is not current using model
                 if os.path.exists(value) and key != current_lc:
                     # delete files from models directory except of current_lc
                     os.remove(value)
                     logger.info(f"Successfully deleted file {value}")
         else:
-            logger.info(f"Unfortunately dictionary empty")
+            logger.info(f"Unfortunately dictionary empty or None")
 
     def get_unused(self, current_lc):
         models_dir = "../models"
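
Review note: the switch from zip(unused_model_dict.keys(), unused_model_dict.values()) to unused_model_dict.items() is behavior-preserving; items() yields the same (key, value) pairs and is the idiomatic spelling. A quick check:

    d = {"tinyllama": "../models/tinyllama.gguf", "phi3": "../models/phi3.gguf"}  # hypothetical paths
    assert list(zip(d.keys(), d.values())) == list(d.items())

Two smaller nits: "Execution filed" is presumably meant to read "Execution failed", and the new f"Unfortunately dictionary empty or None" is an f-string with no placeholders, so a plain string literal would do.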
 