Spaces:
Runtime error
Runtime error
Errolmking
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -1,71 +1,27 @@
|
|
1 |
-
#
|
2 |
-
|
3 |
-
from langchain.vectorstores import FAISS
|
4 |
-
from langchain.embeddings.openai import OpenAIEmbeddings
|
5 |
-
from langchain.memory.simple import SimpleMemory
|
6 |
-
|
7 |
-
from langchain.chains import ConversationChain, LLMChain, SequentialChain
|
8 |
-
from langchain.memory import ConversationBufferMemory
|
9 |
-
|
10 |
-
from langchain.prompts import ChatPromptTemplate, PromptTemplate
|
11 |
-
from langchain.document_loaders import UnstructuredFileLoader
|
12 |
|
|
|
|
|
13 |
from langchain.chat_models import ChatOpenAI
|
14 |
-
from langchain.
|
15 |
-
from langchain.memory import ConversationSummaryMemory
|
16 |
-
|
17 |
-
from langchain.callbacks import PromptLayerCallbackHandler
|
18 |
-
from langchain.prompts.chat import (
|
19 |
-
ChatPromptTemplate,
|
20 |
-
SystemMessagePromptTemplate,
|
21 |
-
AIMessagePromptTemplate,
|
22 |
-
HumanMessagePromptTemplate,
|
23 |
-
)
|
24 |
-
|
25 |
-
from langchain.schema import AIMessage, HumanMessage, SystemMessage
|
26 |
-
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
|
27 |
-
from langchain.callbacks.base import BaseCallbackHandler
|
28 |
-
import gradio as gr
|
29 |
|
30 |
-
|
31 |
-
from
|
32 |
-
|
33 |
-
|
34 |
-
from
|
35 |
-
from langchain.callbacks.base import BaseCallbackHandler
|
36 |
-
|
37 |
-
import itertools
|
38 |
-
import time
|
39 |
-
import os
|
40 |
-
import getpass
|
41 |
import json
|
|
|
42 |
import sys
|
43 |
-
|
|
|
44 |
|
45 |
-
import
|
46 |
-
import
|
47 |
import gradio as gr
|
48 |
|
49 |
-
|
50 |
-
|
51 |
-
#Load the FAISS Model ( vector )
|
52 |
-
openai.api_key = os.environ["OPENAI_API_KEY"]
|
53 |
-
db = FAISS.load_local("db", OpenAIEmbeddings())
|
54 |
-
|
55 |
-
#API Keys
|
56 |
-
promptlayer.api_key = os.environ["PROMPTLAYER"]
|
57 |
-
|
58 |
-
MODEL = "gpt-3.5-turbo"
|
59 |
-
# MODEL = "gpt-4"
|
60 |
-
|
61 |
-
from langchain.callbacks import PromptLayerCallbackHandler
|
62 |
-
from langchain.prompts.chat import (
|
63 |
-
ChatPromptTemplate,
|
64 |
-
SystemMessagePromptTemplate,
|
65 |
-
AIMessagePromptTemplate,
|
66 |
-
HumanMessagePromptTemplate,
|
67 |
-
)
|
68 |
-
from langchain.memory import ConversationSummaryMemory
|
69 |
|
70 |
# Defined a QueueCallback, which takes as a Queue object during initialization. Each new token is pushed to the queue.
|
71 |
class QueueCallback(BaseCallbackHandler):
|
@@ -80,8 +36,6 @@ class QueueCallback(BaseCallbackHandler):
|
|
80 |
def on_llm_end(self, *args, **kwargs: Any) -> None:
|
81 |
return self.q.empty()
|
82 |
|
83 |
-
MODEL = "gpt-3.5-turbo-16k"
|
84 |
-
|
85 |
# Defined a QueueCallback, which takes as a Queue object during initialization. Each new token is pushed to the queue.
|
86 |
class QueueCallback(BaseCallbackHandler):
|
87 |
"""Callback handler for streaming LLM responses to a queue."""
|
@@ -97,9 +51,10 @@ class QueueCallback(BaseCallbackHandler):
|
|
97 |
|
98 |
class Agent:
|
99 |
|
100 |
-
def __init__(self, prompt_template='', model_name=MODEL, verbose=
|
101 |
self.verbose = verbose
|
102 |
self.llm = ChatOpenAI(
|
|
|
103 |
model_name=MODEL,
|
104 |
temperature=temp
|
105 |
)
|
@@ -122,8 +77,7 @@ class Agent:
|
|
122 |
|
123 |
llm = ChatOpenAI(
|
124 |
model_name=MODEL,
|
125 |
-
callbacks=[QueueCallback(q),
|
126 |
-
PromptLayerCallbackHandler(pl_tags=["unit-generator"])],
|
127 |
streaming=True,
|
128 |
)
|
129 |
|
@@ -160,9 +114,7 @@ unit_generator_prompt = """
|
|
160 |
$$$
|
161 |
{input}
|
162 |
$$$
|
163 |
-
|
164 |
Do the following:
|
165 |
-
|
166 |
Enduring Ideas: Make a list of 5 big and enduring ideas that students should walk away with.
|
167 |
Essential Questions: Make a list of 5 essential questions we want students to think about. These questions should be open-ended and provocative. Written in "kid friendly" language. Designed to focus instruction for uncovering the important ideas of the content.
|
168 |
Key Concepts: Make a list of 5 ideas, concepts, generalizations and principles we want students to know and understand about the unit or topic we are teaching?
|
@@ -225,9 +177,7 @@ with app:
|
|
225 |
gr.Markdown(
|
226 |
"""
|
227 |
# Syllabo
|
228 |
-
|
229 |
A suite of generative ai tools for designing and building learning experiences.
|
230 |
-
|
231 |
""")
|
232 |
|
233 |
with gr.Tab("Unit Builder"):
|
|
|
1 |
+
#0.0.1
|
2 |
+
import openai
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
3 |
|
4 |
+
#Langchain Imports
|
5 |
+
from langchain.prompts import ChatPromptTemplate
|
6 |
from langchain.chat_models import ChatOpenAI
|
7 |
+
from langchain.schema import StrOutputParser
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
8 |
|
9 |
+
#Type Hinting
|
10 |
+
from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, Union
|
11 |
+
|
12 |
+
#Utils
|
13 |
+
from operator import itemgetter
|
|
|
|
|
|
|
|
|
|
|
|
|
14 |
import json
|
15 |
+
from dataclasses import dataclass
|
16 |
import sys
|
17 |
+
import logging
|
18 |
+
from google.colab import userdata
|
19 |
|
20 |
+
from queue import Queue, Empty
|
21 |
+
from threading import Thread
|
22 |
import gradio as gr
|
23 |
|
24 |
+
MODEL = "gpt-3.5-turbo-16k"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
25 |
|
26 |
# Defined a QueueCallback, which takes as a Queue object during initialization. Each new token is pushed to the queue.
|
27 |
class QueueCallback(BaseCallbackHandler):
|
|
|
36 |
def on_llm_end(self, *args, **kwargs: Any) -> None:
|
37 |
return self.q.empty()
|
38 |
|
|
|
|
|
39 |
# Defined a QueueCallback, which takes as a Queue object during initialization. Each new token is pushed to the queue.
|
40 |
class QueueCallback(BaseCallbackHandler):
|
41 |
"""Callback handler for streaming LLM responses to a queue."""
|
|
|
51 |
|
52 |
class Agent:
|
53 |
|
54 |
+
def __init__(self, prompt_template='', model_name=MODEL, verbose=True, temp=0.2):
|
55 |
self.verbose = verbose
|
56 |
self.llm = ChatOpenAI(
|
57 |
+
openai_api_key=api_key,
|
58 |
model_name=MODEL,
|
59 |
temperature=temp
|
60 |
)
|
|
|
77 |
|
78 |
llm = ChatOpenAI(
|
79 |
model_name=MODEL,
|
80 |
+
callbacks=[QueueCallback(q)],
|
|
|
81 |
streaming=True,
|
82 |
)
|
83 |
|
|
|
114 |
$$$
|
115 |
{input}
|
116 |
$$$
|
|
|
117 |
Do the following:
|
|
|
118 |
Enduring Ideas: Make a list of 5 big and enduring ideas that students should walk away with.
|
119 |
Essential Questions: Make a list of 5 essential questions we want students to think about. These questions should be open-ended and provocative. Written in "kid friendly" language. Designed to focus instruction for uncovering the important ideas of the content.
|
120 |
Key Concepts: Make a list of 5 ideas, concepts, generalizations and principles we want students to know and understand about the unit or topic we are teaching?
|
|
|
177 |
gr.Markdown(
|
178 |
"""
|
179 |
# Syllabo
|
|
|
180 |
A suite of generative ai tools for designing and building learning experiences.
|
|
|
181 |
""")
|
182 |
|
183 |
with gr.Tab("Unit Builder"):
|