Update llm/llamacpp/lc_model.py
llm/llamacpp/lc_model.py (+4 -3)
@@ -48,9 +48,10 @@ class LC_TinyLlama(LLMInterface, ABC):
 
     @staticmethod
     def __read_yaml():
-        print(os.listdir('
+        print(os.listdir('.'))
+        print(os.getcwd())
         try:
-            yaml_file = os.path.join("
+            yaml_file = os.path.join(".../", 'prompts.yaml')
             with open(yaml_file, 'r') as file:
                 data = yaml.safe_load(file)
                 return data
@@ -67,7 +68,7 @@ class LC_TinyLlama(LLMInterface, ABC):
         prompt = PromptTemplate(template=template, input_variables=["entity"])
 
         llm = LlamaCpp(
-            model_path=os.path.join("
+            model_path=os.path.join(".../models", self.model_config["model_name"]),
             temperature=self.model_config["temperature"],
             max_tokens=self.model_config["max_tokens"],
             top_p=self.model_config["top_p"],
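Read as a unified diff, the commit adds two debug prints and repoints two relative paths. The prints are a common way to diagnose a FileNotFoundError on a relative path: they show which directory the process actually resolves '.' against. Below is a minimal runnable sketch of the patched __read_yaml in context; CONFIG_DIR and the except clause are assumptions, since the real directory is elided (".../") in the diff and the error handler sits outside the hunk.

import os
import yaml

# Hypothetical stand-in for the directory elided as ".../" in the diff.
CONFIG_DIR = "."

class LC_TinyLlama:
    @staticmethod
    def __read_yaml():
        # Debug prints added by this commit: they reveal the working directory,
        # which is what the relative prompts.yaml path depends on.
        print(os.listdir('.'))
        print(os.getcwd())
        try:
            yaml_file = os.path.join(CONFIG_DIR, 'prompts.yaml')
            with open(yaml_file, 'r') as file:
                data = yaml.safe_load(file)
                return data
        except FileNotFoundError as exc:
            # The except clause is not shown in the diff; re-raising is an assumption.
            raise RuntimeError(f"Could not load prompts.yaml from {CONFIG_DIR}") from exc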
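And a self-contained sketch of the second hunk's LlamaCpp setup, assuming the langchain-community import split (pip install langchain-community llama-cpp-python) and hypothetical values both for the directory elided as ".../models" and for model_config, which the real class presumably populates from the YAML read above.

import os

from langchain_core.prompts import PromptTemplate
from langchain_community.llms import LlamaCpp  # requires llama-cpp-python

# Hypothetical values; the real directory is elided as ".../models" in the diff,
# and model_config would normally come from the YAML loaded by __read_yaml().
MODELS_DIR = "models"
model_config = {
    "model_name": "tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf",  # hypothetical GGUF file
    "temperature": 0.7,
    "max_tokens": 256,
    "top_p": 0.95,
}

template = "Give a one-sentence description of {entity}."
prompt = PromptTemplate(template=template, input_variables=["entity"])

llm = LlamaCpp(
    model_path=os.path.join(MODELS_DIR, model_config["model_name"]),
    temperature=model_config["temperature"],
    max_tokens=model_config["max_tokens"],
    top_p=model_config["top_p"],
)

Joining the models directory with model_config["model_name"] keeps the model filename in configuration, so swapping models means editing the YAML rather than the code.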