Doux Thibault committed
Commit 025e412
1 Parent(s): 99d115b

add llm to front + api key in dot env

Files changed (3)
  1. .env +1 -0
  2. Modules/rag.py +4 -6
  3. app.py +10 -2
.env ADDED
@@ -0,0 +1 @@
+ MISTRAL_API_KEY = "i5jSJkCFNGKfgIztloxTMjfckiFbYBj4"
Modules/rag.py CHANGED
@@ -1,11 +1,10 @@
  import os
  os.environ['TOKENIZERS_PARALLELISM'] = 'true'
- os.environ['MISTRAL_API_KEY'] = "i5jSJkCFNGKfgIztloxTMjfckiFbYBj4"
- # os.environ['OPENAI_API_KEY'] = "sk-proj-2WJfO8JpVyrdIeJ8QsO0T3BlbkFJWLhZF1xMlRZVFjNBccWh"
- os.environ['TAVILY_API_KEY'] = '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'
+ from dotenv import load_dotenv
+ load_dotenv()  # load .env api keys
  
  mistral_api_key = os.getenv("MISTRAL_API_KEY")
- tavily_api_key = os.getenv("TAVILY_API_KEY")
+
  from langchain_community.document_loaders import PyPDFLoader
  from langchain.text_splitter import RecursiveCharacterTextSplitter
  from langchain_community.document_loaders import WebBaseLoader
@@ -19,7 +18,6 @@ from langchain_mistralai import ChatMistralAI
  from langchain.embeddings.sentence_transformer import SentenceTransformerEmbeddings
  from langchain_community.tools import DuckDuckGoSearchRun
  
-
  def load_chunk_persist_pdf() -> Chroma:
      pdf_folder_path = "data/pdf_folder/"
      documents = []
@@ -30,7 +28,7 @@ def load_chunk_persist_pdf() -> Chroma:
      documents.extend(loader.load())
      text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=10)
      chunked_documents = text_splitter.split_documents(documents)
-
+     os.makedirs("data/chroma_store/", exist_ok=True)
      vectorstore = Chroma.from_documents(
          documents=chunked_documents,
          embedding=MistralAIEmbeddings(),
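
For context, a minimal sketch of the pattern this hunk moves rag.py to: the Mistral key now comes from .env via python-dotenv, and the Chroma store directory is created before the vectorstore is built. The PDF iteration loop, the persist_directory argument, and the MistralAIEmbeddings import path are assumptions not shown in the hunk.

# Sketch only; pieces not visible in the diff are marked as assumptions.
import os
from dotenv import load_dotenv
from langchain_community.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import Chroma
from langchain_mistralai import MistralAIEmbeddings  # import path assumed

load_dotenv()  # exposes MISTRAL_API_KEY from .env to os.getenv and the Mistral client

def load_chunk_persist_pdf() -> Chroma:
    pdf_folder_path = "data/pdf_folder/"
    documents = []
    for file_name in os.listdir(pdf_folder_path):  # assumed loop, not in the hunk
        if file_name.endswith(".pdf"):
            documents.extend(PyPDFLoader(os.path.join(pdf_folder_path, file_name)).load())
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=10)
    chunked_documents = text_splitter.split_documents(documents)
    os.makedirs("data/chroma_store/", exist_ok=True)  # new in this commit
    return Chroma.from_documents(
        documents=chunked_documents,
        embedding=MistralAIEmbeddings(),
        persist_directory="data/chroma_store/",  # assumed, implied by the makedirs call
    )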
app.py CHANGED
@@ -2,11 +2,17 @@ import streamlit as st
  from st_audiorec import st_audiorec
  from Modules.Speech2Text.transcribe import transcribe
  import base64
+ from langchain_mistralai import ChatMistralAI
+ from dotenv import load_dotenv
+ load_dotenv()  # load .env api keys
+ import os
+ mistral_api_key = os.getenv("MISTRAL_API_KEY")
  
  st.set_page_config(layout="wide", initial_sidebar_state="collapsed")
  # Create two columns
  col1, col2 = st.columns(2)
  video_uploaded = None
+ llm = ChatMistralAI(model="mistral-large-latest", mistral_api_key=mistral_api_key, temperature=0)
  
  # First column containers
  with col1:
@@ -37,8 +43,10 @@ with col1:
  
      with st.chat_message("assistant"):
          # Build answer from LLM
-         response = " to be DEFINED "# TO DO
-         st.session_state.messages.append({"role": "assistant", "content": response})
+
+         response = llm.invoke(st.session_state.messages).content
+         st.session_state.messages.append({"role": "assistant", "content": response})
+         st.markdown(response)
  
  st.subheader("Movement Analysis")
  # TO DO
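
Likewise, a rough sketch of the chat flow these app.py hunks add, shown outside Streamlit for brevity: ChatMistralAI is built from the .env key, the list of role/content dicts kept in st.session_state.messages is passed straight to invoke (LangChain coerces such dicts to chat messages), and the answer is appended and rendered. The example prompt and the standalone messages list are made up; the chat-input and session-state setup is not part of the diff.

# Standalone sketch of the wiring added to app.py; prompt content is hypothetical.
import os
from dotenv import load_dotenv
from langchain_mistralai import ChatMistralAI

load_dotenv()  # read MISTRAL_API_KEY from .env
llm = ChatMistralAI(
    model="mistral-large-latest",
    mistral_api_key=os.getenv("MISTRAL_API_KEY"),
    temperature=0,
)

# Stand-in for st.session_state.messages: a list of {"role", "content"} dicts.
messages = [{"role": "user", "content": "Give me a three-exercise warm-up."}]
response = llm.invoke(messages).content   # same call the diff adds
messages.append({"role": "assistant", "content": response})
print(response)                           # app.py renders this with st.markdown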