CineAI committed
Commit 4e8158d
1 Parent(s): bfcce31

Update llm/llamacpp/lc_model.py

Files changed (1)
  1. llm/llamacpp/lc_model.py +20 -17
llm/llamacpp/lc_model.py CHANGED
@@ -1,5 +1,5 @@
-import logging
 import os
+import logging
 from abc import ABC
 
 import requests
@@ -10,6 +10,8 @@ from langchain_community.llms import LlamaCpp
 from llm.config import config
 from llm.llm_interface import LLMInterface
 
+print(os.getcwd())
+
 logger = logging.getLogger(__name__)
 
 logger.setLevel(logging.CRITICAL)  # because if something went wrong in execution application can't be work anymore
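
Worth noting about the context above: logging.CRITICAL suppresses every record below that level, so the logger.info calls elsewhere in this file will be dropped unless the level is lowered. A minimal standalone illustration of the level check:

    import logging

    logger = logging.getLogger("demo")
    logger.setLevel(logging.CRITICAL)
    logger.info("not emitted")   # filtered out: INFO < CRITICAL
    logger.critical("emitted")   # passes the level check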
@@ -23,6 +25,11 @@ file_handler.setFormatter(formatted)
 
 logger.addHandler(file_handler)
 
+path_to_models = os.path.join(os.getcwd(), "/llm/models")
+print(path_to_models)
+
+path_to_prompts = os.path.join(os.getcwd(), "/llm/")
+print(path_to_prompts)
 
 class LC_TinyLlama(LLMInterface, ABC):
     def __init__(self, prompt_entity: str, prompt_id: int = 0):
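
A note on the new path constants: on POSIX systems, os.path.join discards every component to the left of an absolute one, so os.path.join(os.getcwd(), "/llm/models") evaluates to "/llm/models" regardless of the working directory. A minimal sketch of the difference (the working directory shown is hypothetical):

    import os

    cwd = "/home/user/app"  # hypothetical working directory

    # A leading slash makes the second component absolute, so cwd is dropped.
    print(os.path.join(cwd, "/llm/models"))    # -> /llm/models

    # Relative components are anchored under the first argument.
    print(os.path.join(cwd, "llm", "models"))  # -> /home/user/app/llm/models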
@@ -34,7 +41,7 @@ class LC_TinyLlama(LLMInterface, ABC):
         try:
             get_file = requests.get(self.model_config["model_url"])
             if get_file.status_code == 200:
-                path_to_model = os.path.join("..models", self.model_config["model_name"])
+                path_to_model = os.path.join(path_to_models, self.model_config["model_name"])
                 with open(path_to_model, "wb") as f:
                     f.write(get_file.content)
                 logger.info("Model file successfully recorded")
@@ -48,10 +55,8 @@ class LC_TinyLlama(LLMInterface, ABC):
 
     @staticmethod
     def __read_yaml():
-        print(os.listdir('.'))
-        print(os.getcwd())
         try:
-            yaml_file = os.path.join(".../", 'prompts.yaml')
+            yaml_file = os.path.join(path_to_prompts, 'prompts.yaml')
             with open(yaml_file, 'r') as file:
                 data = yaml.safe_load(file)
                 return data
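
__read_yaml parses the prompt file with yaml.safe_load, which returns plain Python dicts and lists. A self-contained round-trip with a hypothetical prompts.yaml layout (the real schema is not visible in this commit):

    import yaml

    raw = """
    prompts:
      - id: 0
        template: "Tell me about {entity}."
    """

    data = yaml.safe_load(raw)
    print(data["prompts"][0]["template"])  # -> Tell me about {entity}.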
@@ -68,7 +73,7 @@ class LC_TinyLlama(LLMInterface, ABC):
         prompt = PromptTemplate(template=template, input_variables=["entity"])
 
         llm = LlamaCpp(
-            model_path=os.path.join(".../models", self.model_config["model_name"]),
+            model_path=os.path.join(path_to_models, self.model_config["model_name"]),
             temperature=self.model_config["temperature"],
             max_tokens=self.model_config["max_tokens"],
             top_p=self.model_config["top_p"],
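
For context on how the pieces above fit together: LlamaCpp is a LangChain LLM, so the PromptTemplate can be piped straight into it. A minimal usage sketch with placeholder values (the model path and sampling parameters are assumptions, not values from config):

    from langchain_community.llms import LlamaCpp
    from langchain_core.prompts import PromptTemplate

    llm = LlamaCpp(
        model_path="llm/models/tinyllama.gguf",  # placeholder local path
        temperature=0.4,
        max_tokens=256,
        top_p=0.95,
    )

    prompt = PromptTemplate(template="Question: {entity}\nAnswer:", input_variables=["entity"])
    chain = prompt | llm  # the rendered prompt string is fed to the model
    print(chain.invoke({"entity": "What is llama.cpp?"}))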
@@ -100,13 +105,12 @@
             logger.info(f"Unfortunately dictionary empty or None")
 
     def get_unused(self, current_lc):
-        models_dir = ".../models"
 
-        if len(os.listdir(models_dir)) > 1:
-            file_names = [os.path.basename(md) for md in os.listdir(models_dir)]
+        if len(os.listdir(path_to_models)) > 1:
+            file_names = [os.path.basename(md) for md in os.listdir(path_to_models)]
             for item in file_names:
                 if item != current_lc:
-                    unused_model_file = os.path.join(models_dir, item)
+                    unused_model_file = os.path.join(path_to_models, item)
                     return {item: unused_model_file}
         else:
             return None
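
In get_unused, os.listdir already returns bare file names, so the os.path.basename call is a no-op; the method simply returns the first model file whose name differs from the active one. A hedged pathlib equivalent (illustrative, not the committed code):

    from pathlib import Path

    def first_unused(models_dir: Path, current_lc: str) -> dict[str, str] | None:
        # Return the first model file that is not the one currently in use.
        for md in models_dir.iterdir():
            if md.name != current_lc:
                return {md.name: str(md)}
        return None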
@@ -128,7 +132,7 @@ class LC_Phi3(LLMInterface, ABC):
         try:
             get_file = requests.get(self.model_config["model_url"])
             if get_file.status_code == 200:
-                path_to_model = os.path.join("../models", self.model_config["model_name"])
+                path_to_model = os.path.join(path_to_models, self.model_config["model_name"])
                 with open(path_to_model, "wb") as f:
                     f.write(get_file.content)
                 logger.info("Model file successfully recorded")
@@ -142,7 +146,7 @@
     @staticmethod
     def __read_yaml():
         try:
-            yaml_file = os.path.join("../", 'prompts.yaml')
+            yaml_file = os.path.join(path_to_prompts, 'prompts.yaml')
             with open(yaml_file, 'r') as file:
                 data = yaml.safe_load(file)
                 return data
@@ -159,7 +163,7 @@
         prompt = PromptTemplate(template=template, input_variables=["entity"])
 
         llm = LlamaCpp(
-            model_path=os.path.join("../models", self.model_config["model_name"]),
+            model_path=os.path.join(path_to_models, self.model_config["model_name"]),
             temperature=self.model_config["temperature"],
             max_tokens=self.model_config["max_tokens"],
             top_p=self.model_config["top_p"],
@@ -191,13 +195,12 @@
             logger.info(f"Unfortunately dictionary empty or None")
 
     def get_unused(self, current_lc):
-        models_dir = "../models"
 
-        if len(os.listdir(models_dir)) > 1:
-            file_names = [os.path.basename(md) for md in os.listdir(models_dir)]
+        if len(os.listdir(path_to_models)) > 1:
+            file_names = [os.path.basename(md) for md in os.listdir(path_to_models)]
             for item in file_names:
                 if item != current_lc:
-                    unused_model_file = os.path.join(models_dir, item)
+                    unused_model_file = os.path.join(path_to_models, item)
                     return {item: unused_model_file}
         else:
             return None
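
LC_TinyLlama and LC_Phi3 receive identical edits throughout this commit, which suggests the two classes differ only in the configuration they load. One hedged refactoring sketch (the config_key attribute and the config keys shown are assumptions, not code from this repository):

    from abc import ABC

    from llm.config import config
    from llm.llm_interface import LLMInterface

    class LC_Base(LLMInterface, ABC):
        config_key: str  # overridden by each subclass; the keys below are assumed

        def __init__(self, prompt_entity: str, prompt_id: int = 0):
            self.model_config = config[self.config_key]
            self.prompt_entity = prompt_entity
            self.prompt_id = prompt_id

    class LC_TinyLlama(LC_Base):
        config_key = "tinyllama"

    class LC_Phi3(LC_Base):
        config_key = "phi3"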
 
 