Synced repo using 'sync_with_huggingface' Github Action
app.py
CHANGED
@@ -4,8 +4,20 @@ from custom_prompt import TexRestructureTemplate,MetadataTemplate
 import ast
 from gpt import get_chat_completion
 import openai
-
-
+openAiKey = st.text_input(label="Input the openai key", type="password")
+openai.api_key = openAiKey
+def get_chat_completion(prompt, model="gpt-3.5-turbo"):
+    try:
+        response = openai.ChatCompletion.create(
+            model=model,
+            messages=[
+                {"role": "system", "content": "You are a helpful assistant."},
+                {"role": "user", "content": prompt}
+            ]
+        )
+        return response['choices'][0]['message']['content']
+    except Exception as e:
+        return str(e)
 def main():
     st.sidebar.markdown("""
     <style>

@@ -68,13 +80,11 @@ def main():
 
     # Get the link input from the user
     link = st.text_input("Enter the link to the JEE Main physics question:")
-    openAiKey = st.text_input(label="Input the openai key", type="password")
     if st.button("Submit"):
         if link:
             try:
                 ques,ans = parsing.parse(link)
                 print("Checkpoint-1")
-                openai.api_key = openAiKey
                 restructure_prompt = TexRestructureTemplate()
                 q_restruct_prompt = restructure_prompt.format(content=ques)
                 question = get_chat_completion(q_restruct_prompt)