Anand Sampat committed
Commit 3d09df9
1 Parent(s): 7c2fc96

initial app commit with basic functionality

Files changed (2)
  1. app.py +201 -0
  2. requirements.txt +3 -0
app.py ADDED
@@ -0,0 +1,201 @@
+ import gradio as gr
+ import asyncio
+ import os
+ import time
+ from typing import List, Dict
+ from openai import OpenAI
+ from openai import AsyncOpenAI
+
+ MAX_SUBCONCEPTS = 25
+
+ async def call_openai_api(sn_api_key, prompt: str) -> Dict:
+     # One async chat-completion call against SambaNova's OpenAI-compatible endpoint
+     sn_dev_client_async = AsyncOpenAI(
+         base_url="https://api.sambanova.ai/v1",
+         api_key=sn_api_key
+     )
+
+     response = await sn_dev_client_async.chat.completions.create(
+         model="Meta-Llama-3.2-3B-Instruct",
+         messages=[
+             {
+                 "role": "system",
+                 "content": "You are a knowledge graph generator. Generate detailed answers to questions posed about subconcepts and give an educated response as if you were a professor explaining to a student."
+             },
+             {
+                 "role": "user",
+                 "content": prompt
+             }
+         ],
+     )
+     return response
+
+ async def make_multiple_openai_calls(sn_api_key, prompts: List[str]) -> List[Dict]:
+     # Fan out one request per prompt and await all of them concurrently
+     tasks = [call_openai_api(sn_api_key, prompt) for prompt in prompts]
+     return await asyncio.gather(*tasks)
+
+ def process_concept(sn_api_key, concept, num_subconcepts, progress=gr.Progress()) -> tuple:
+     start_time = time.time()
+
+     sn_dev_client = OpenAI(
+         base_url="https://api.sambanova.ai/v1",
+         api_key=sn_api_key
+     )
+
+     progress(0, "Identifying subconcepts")
+     # Single API call to break the concept down into subconcepts
+     response = sn_dev_client.chat.completions.create(
+         model="Meta-Llama-3.2-3B-Instruct",
+         messages=[
+             {
+                 "role": "user",
+                 "content": f"""Create a set of subconcepts from this concept: {concept}.
+ Do this by breaking the concept down into multiple subconcepts, each on a new line, so it is easy to parse in the following way. Note that in the example below there is no additional text except for the subconcepts and new lines.
+
+ Example (for the concept "machine learning"):
+
+ gradient descent
+ neuron training
+ loss function
+ optimization functions
+ backpropagation
+ """
+             }
+         ],
+     )
+
+     result = response.choices[0].message.content
+     # Clean up the response: one subconcept per line, stripped and deduplicated
+     subconcepts = result.strip().split('\n')
+     subconcepts = list(set([subconcept.strip() for subconcept in subconcepts]))
+     # print(subconcepts)
+     num_total_subconcepts = len(subconcepts)
+
+     progress(0.2, "Preparing subconcepts")
+     # Limit to the number of subconcepts requested by the user
+     lmt = min(num_subconcepts, num_total_subconcepts)
+     subconcepts = subconcepts[:lmt]
+
+     prompts = [
+         f"Please give a detailed explanation of this subconcept: {subconcept}" for subconcept in subconcepts
+     ]
+
+     progress(0.3, f"Generating explanations for {len(subconcepts)} subconcepts in parallel")
+     # Run the async fan-out from this synchronous context
+     loop = asyncio.new_event_loop()
+     asyncio.set_event_loop(loop)
+     try:
+         results = loop.run_until_complete(make_multiple_openai_calls(sn_api_key, prompts))
+     finally:
+         loop.close()
+
+     # Extract the text content from each response
+     responses = [result.choices[0].message.content for result in results]
+
+     progress(0.6, f"Summarizing explanations to create an intro for {len(subconcepts)} subconcepts")
+     # Summarize the results using a single synchronous call
+     content_to_summarize = ""
+     for subconcept, response in zip(subconcepts, responses):
+         content_to_summarize += f"## {subconcept.title()}\n\n{response}\n\n---\n\n"
+
+     response = sn_dev_client.chat.completions.create(
+         model="Meta-Llama-3.1-8B-Instruct",  # needs the longer context window
+         messages=[
+             {
+                 "role": "user",
+                 "content": f"""Summarize the results for the concept {concept} by creating an introduction for the class that incorporates
+ the subconcepts: {" ".join(subconcepts)}. Here is all of the information you want to summarize:
+
+ {content_to_summarize}
+
+ Please present this as an introduction to a class on
+ {concept}.
+ """
+             }
+         ],
+     )
+     print(response)
+     intro_summary = response.choices[0].message.content
+
+     end_time = time.time()
+     total_time = end_time - start_time
+
+     progress(0.9, "Formatting output")
+     # Format the output in Markdown
+     markdown_intro = f"# Lesson Plan: {concept.title()}\n\n"
+     markdown_intro += f"**Number of Llama 3.2 3B calls made to SambaNova's API:** {len(subconcepts) + 1}\n\n"
+     markdown_intro += "**1 Llama 3.1 8B call made to SambaNova's API to summarize**\n\n"
+     markdown_intro += f"**Total time taken:** {total_time:.2f} seconds\n\n"
+     markdown_intro += "\n\n---\n\n"
+     markdown_intro += intro_summary
+
+     subconcept_markdowns = []
+     for subconcept, response in zip(subconcepts, responses):
+         subconcept_markdowns.append(f"## {subconcept.title()}\n\n{response}\n\n")
+
+     progress(1.0, "Complete")
+
+     # Update the tabs (and their corresponding contents) with content for each lesson;
+     # any tabs beyond the generated lessons stay hidden
+     new_tabs = []
+     new_tab_contents = []
+     for i in range(len(subconcept_markdowns)):
+         new_tabs.append(gr.update(label=f"Lesson {i+1}: {subconcepts[i].title()}", visible=True))
+         new_tab_contents.append(gr.Markdown(f"{subconcept_markdowns[i]}"))
+     new_tabs.extend([gr.update(visible=False) for _ in range(MAX_SUBCONCEPTS - len(subconcept_markdowns))])
+     new_tab_contents.extend([gr.update(visible=False) for _ in range(MAX_SUBCONCEPTS - len(subconcept_markdowns))])
+
+     return "Process complete!", markdown_intro, *new_tabs, *new_tab_contents
+
+ with gr.Blocks() as demo:
+     gr.Markdown(
+         """
+ <h1 style="text-align: center;">Lesson Plan Generator</h1>
+ <p style="text-align: center; font-size: 20px;">Ever wanted to learn something new? Struggled to break it down into more digestible subconcepts? In this demo, we use <a href="https://cloud.sambanova.ai">SambaNova's</a> superfast Llama 3.2 3B and Llama 3.1 8B models to summarize the concept and subconcepts as well as provide a detailed lesson for each of the subconcepts.</p>
+ <p style="text-align: center; font-size: 18px;">To use this, follow the instructions below:</p>
+ """,
+         elem_id="header"
+     )
+
+     gr.Markdown(
+         """
+ <div style="margin: auto; width: 50%; text-align: left; font-size: 16px">
+ <ol>
+ <li>Navigate to <a href="https://cloud.sambanova.ai">https://cloud.sambanova.ai</a>, log in, and copy your API key</li>
+ <li>Paste it in the SambaNova API Key box below</li>
+ <li>Enter a concept you are interested in (e.g. Variational Autoencoders)</li>
+ <li>Choose the number of subconcepts you want to break your lessons into</li>
+ <li>Click 'Generate Lesson Plan'</li>
+ <li>Wait a few seconds for the multiple Llama 3B and 8B calls to finish</li>
+ <li>Read through and enjoy your lesson plans</li>
+ </ol>
+ </div>
+ """,
+         elem_id="instructions"
+     )
+
+     with gr.Column():
+         sn_api_key_input = gr.Textbox(label="Enter your SambaNova API Key (https://cloud.sambanova.ai)", type="password")
+
+         with gr.Row():
+             concept_input = gr.Textbox(label="Enter a concept", placeholder="e.g., Artificial Intelligence")
+             slider = gr.Slider(minimum=1, maximum=MAX_SUBCONCEPTS, value=5, label="Number of subconcepts", step=1)
+         generate_btn = gr.Button("Generate Lesson Plan", variant="primary", size="lg")
+
+     with gr.Column():
+         progress_output = gr.Textbox(label="Progress", interactive=True)
+         lesson_intro = gr.Markdown(label="Lesson Intro")
+
+     tab_contents = []
+
+     with gr.Tabs() as tabs:
+         for i in range(MAX_SUBCONCEPTS):  # Pre-create a fixed pool of hidden tabs that get revealed per lesson
+             with gr.Tab(f"Lesson {i+1}", visible=False):
+                 tab_contents.append(gr.Markdown(f"This is content for Lesson {i+1}"))
+
+     generate_btn.click(
+         process_concept,
+         inputs=[sn_api_key_input, concept_input, slider],
+         outputs=[progress_output, lesson_intro] + [tabs.children[i] for i in range(MAX_SUBCONCEPTS)] + tab_contents
+     )
+
+ demo.launch()
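
The core speed-up in app.py comes from make_multiple_openai_calls, which fans the per-subconcept prompts out with asyncio.gather instead of looping over them sequentially. The sketch below isolates that pattern from the Gradio wiring; the endpoint and model name are taken from the diff above, while the SAMBANOVA_API_KEY environment variable, the explain/main helpers, and the example subconcepts are assumptions added purely for illustration.

import asyncio
import os

from openai import AsyncOpenAI


async def explain(client: AsyncOpenAI, subconcept: str) -> str:
    # One chat completion per subconcept, using the same endpoint and model as app.py
    response = await client.chat.completions.create(
        model="Meta-Llama-3.2-3B-Instruct",
        messages=[{"role": "user", "content": f"Please give a detailed explanation of this subconcept: {subconcept}"}],
    )
    return response.choices[0].message.content


async def main() -> None:
    client = AsyncOpenAI(
        base_url="https://api.sambanova.ai/v1",
        api_key=os.environ["SAMBANOVA_API_KEY"],  # assumed environment variable for this sketch
    )
    subconcepts = ["gradient descent", "loss function", "backpropagation"]  # example inputs
    # asyncio.gather issues all requests concurrently rather than one after another
    explanations = await asyncio.gather(*(explain(client, s) for s in subconcepts))
    for subconcept, text in zip(subconcepts, explanations):
        print(f"## {subconcept.title()}\n{text}\n")


if __name__ == "__main__":
    asyncio.run(main())

Because the per-subconcept calls are independent, the total latency is bounded by the slowest single response rather than the sum of all of them.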
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ openai>=0.26.4
+ gradio>=3.18.0
+ asyncio
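
To try the app locally, a typical workflow (not spelled out in the commit) is to install the dependencies with pip install -r requirements.txt, start the demo with python app.py, and paste a SambaNova API key from https://cloud.sambanova.ai into the textbox once the Gradio UI opens.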