Med Tiouti committed on
Commit
42bddce
1 Parent(s): cfd0845

Fix runpod Mounted Volume Path

Files changed (1)
app.py +3 -1
app.py CHANGED
@@ -20,8 +20,9 @@ from langchain import PromptTemplate, LLMChain
 
 # vector stores
 from langchain.vectorstores import FAISS
+import os
 
-cache_path = "./runpod-volume"
+cache_path = "/runpod-volume"
 model_repo = 'daryl149/llama-2-13b-chat-hf'
 
 tokenizer = AutoTokenizer.from_pretrained(model_repo, use_fast=True, cache_dir=cache_path)
@@ -127,6 +128,7 @@ def process_llm_response(llm_response):
     return ans,sources_used
 
 def text_generation(job):
+    # print(os.listdir(cache_path))
     llm_response = qa_chain(job["input"]["prompt"])
     ans,sources_used = process_llm_response(llm_response)
 
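
Why the one-character path change matters: RunPod serverless workers mount the attached network volume at the absolute path /runpod-volume, whereas the old relative "./runpod-volume" resolved against the container's working directory, so the Hugging Face cache landed in ephemeral container storage instead of on the persistent volume. Below is a minimal sketch of a startup sanity check, assuming that mount convention; the fail-fast check is illustrative and not part of the commit.

import os

# The network volume is mounted at an absolute path inside the worker,
# so the cache directory must not depend on the current working directory.
cache_path = "/runpod-volume"

# Illustrative fail-fast check (not in the commit): confirm the volume is
# mounted before from_pretrained tries to cache the 13B-parameter weights there.
if not os.path.isdir(cache_path):
    raise RuntimeError(f"expected RunPod volume mounted at {cache_path}")

# Mirrors the commented-out debug line added in text_generation: list the
# cached files to verify the model snapshot actually lives on the volume.
print(os.listdir(cache_path))

With the absolute path, AutoTokenizer.from_pretrained(model_repo, use_fast=True, cache_dir=cache_path) reuses weights already downloaded to the volume across cold starts instead of re-downloading them into the container's ephemeral filesystem.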