vidhiparikh committed
Commit e17ba1d
1 Parent(s): ba8599a

Update app.py

Files changed (1)
  1. app.py +5 -4
app.py CHANGED
@@ -1,5 +1,6 @@
 import PyPDF2
 import gradio as gr
+import os
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain_community.llms import LlamaCpp
 
@@ -74,16 +75,16 @@ callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])
 
 # Function to create a conversational chain
 def create_conversational_chain(database):
-    model_name = TheBloke/Llama-2-7B-Chat-GGUF
-    model_directory = models
+    model_name = "TheBloke/Llama-2-7B-Chat-GGUF"
+    model_directory = "models"
     #Check if the model file exists in the specified directory
     model_file = os.path.join(model_directory, model_name)
     if os.path.exists(model_file):
         model_path = model_file
-        print(Model file found in the directory. Using the local model file.”)
+        print("Model file found in the directory. Using the local model file.")
     else:
         model_path = model_name
-        print(Model file not found in the directory. Downloading the model from the repository.”)
+        print("Model file not found in the directory. Downloading the model from the repository.")
     #Load the model
     model = AutoModelForCausalLM.from_pretrained(model_path)
     print(model_path)
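
For context, the patched function resolves a local model file first and only falls back to the Hugging Face repository id when nothing is found on disk. Below is a minimal sketch of that resolution step; the resolve_model_path helper name is hypothetical, and the loader is assumed to be a GGUF-capable AutoModelForCausalLM such as the one provided by ctransformers (the diff does not show where that class is imported from).

import os

# Assumption: the app uses a GGUF-capable loader; ctransformers is one such backend.
from ctransformers import AutoModelForCausalLM


def resolve_model_path(model_name, model_directory):
    # Hypothetical helper mirroring the patched logic: prefer a local file, else the repo id.
    model_file = os.path.join(model_directory, model_name)
    if os.path.exists(model_file):
        print("Model file found in the directory. Using the local model file.")
        return model_file
    print("Model file not found in the directory. Downloading the model from the repository.")
    return model_name


model_path = resolve_model_path("TheBloke/Llama-2-7B-Chat-GGUF", "models")
model = AutoModelForCausalLM.from_pretrained(model_path)  # downloads from the Hub if given a repo id
print(model_path)

One caveat worth noting: because model_name contains a slash, os.path.join("models", "TheBloke/Llama-2-7B-Chat-GGUF") yields the nested path models/TheBloke/Llama-2-7B-Chat-GGUF, so a local copy has to live at exactly that path for the os.path.exists check to succeed and skip the download.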