CineAI committed on
Commit
c805123
1 Parent(s): c59651a

Update llm/llamacpp/lc_model.py

Browse files
Files changed (1) hide show
  1. llm/llamacpp/lc_model.py +4 -4
llm/llamacpp/lc_model.py CHANGED
@@ -34,7 +34,7 @@ class LC_TinyLlama(LLMInterface, ABC):
34
  try:
35
  get_file = requests.get(self.model_config["model_url"])
36
  if get_file.status_code == 200:
37
- path_to_model = os.path.join("../models", self.model_config["model_name"])
38
  with open(path_to_model, "wb") as f:
39
  f.write(get_file.content)
40
  logger.info("Model file successfully recorded")
@@ -48,9 +48,9 @@ class LC_TinyLlama(LLMInterface, ABC):
48
 
49
  @staticmethod
50
  def __read_yaml():
51
- print(os.listdir('../'))
52
  try:
53
- yaml_file = os.path.join("../", 'prompts.yaml')
54
  with open(yaml_file, 'r') as file:
55
  data = yaml.safe_load(file)
56
  return data
@@ -67,7 +67,7 @@ class LC_TinyLlama(LLMInterface, ABC):
67
  prompt = PromptTemplate(template=template, input_variables=["entity"])
68
 
69
  llm = LlamaCpp(
70
- model_path=os.path.join("../models", self.model_config["model_name"]),
71
  temperature=self.model_config["temperature"],
72
  max_tokens=self.model_config["max_tokens"],
73
  top_p=self.model_config["top_p"],
 
34
  try:
35
  get_file = requests.get(self.model_config["model_url"])
36
  if get_file.status_code == 200:
37
+ path_to_model = os.path.join("..models", self.model_config["model_name"])
38
  with open(path_to_model, "wb") as f:
39
  f.write(get_file.content)
40
  logger.info("Model file successfully recorded")
 
48
 
49
  @staticmethod
50
  def __read_yaml():
51
+ print(os.listdir('..'))
52
  try:
53
+ yaml_file = os.path.join("..", 'prompts.yaml')
54
  with open(yaml_file, 'r') as file:
55
  data = yaml.safe_load(file)
56
  return data
 
67
  prompt = PromptTemplate(template=template, input_variables=["entity"])
68
 
69
  llm = LlamaCpp(
70
+ model_path=os.path.join("..models", self.model_config["model_name"]),
71
  temperature=self.model_config["temperature"],
72
  max_tokens=self.model_config["max_tokens"],
73
  top_p=self.model_config["top_p"],