bstraehle committed
Commit efc66f7
1 Parent(s): 40c50e5

Update app.py

Files changed (1)
  1. app.py +53 -48
app.py CHANGED
@@ -1,5 +1,5 @@
 import gradio as gr
-import logging, os, sys, time
+import logging, os, sys, threading, time
 
 from agent_langchain import agent_langchain
 from agent_llamaindex import agent_llamaindex
@@ -9,6 +9,8 @@ from trace import trace_wandb
 from dotenv import load_dotenv, find_dotenv
 _ = load_dotenv(find_dotenv())
 
+lock = threading.Lock()
+
 AGENT_OFF = "Off"
 AGENT_LANGCHAIN = "LangChain"
 AGENT_LLAMAINDEX = "LlamaIndex"
@@ -29,59 +31,62 @@ def invoke(openai_api_key, prompt, agent_option):
     if (agent_option is None):
         raise gr.Error("Use Agent is required.")
 
-    os.environ["OPENAI_API_KEY"] = openai_api_key
-
-    completion = ""
-    result = ""
-    callback = ""
-    err_msg = ""
+    with lock:
+        os.environ["OPENAI_API_KEY"] = openai_api_key
 
-    try:
-        start_time_ms = round(time.time() * 1000)
+        completion = ""
+        result = ""
+        callback = ""
+        err_msg = ""
 
-        if (agent_option == AGENT_LANGCHAIN):
-            completion, callback = agent_langchain(
-                config,
-                prompt
-            )
+        try:
+            start_time_ms = round(time.time() * 1000)
+
+            if (agent_option == AGENT_LANGCHAIN):
+                completion, callback = agent_langchain(
+                    config,
+                    prompt
+                )
+
+                result = completion["output"]
+            elif (agent_option == AGENT_LLAMAINDEX):
+                result = agent_llamaindex(
+                    config,
+                    prompt
+                )
+            else:
+                client = OpenAI()
+
+                completion = client.chat.completions.create(
+                    messages = [{"role": "user", "content": prompt}],
+                    model = config["model"],
+                    temperature = config["temperature"]
+                )
 
-            result = completion["output"]
-        elif (agent_option == AGENT_LLAMAINDEX):
-            result = agent_llamaindex(
-                config,
-                prompt
-            )
-        else:
-            client = OpenAI()
+                callback = completion.usage
+                result = completion.choices[0].message.content
+        except Exception as e:
+            err_msg = e
 
-            completion = client.chat.completions.create(
-                messages = [{"role": "user", "content": prompt}],
-                model = config["model"],
-                temperature = config["temperature"]
+            raise gr.Error(e)
+        finally:
+            end_time_ms = round(time.time() * 1000)
+
+            trace_wandb(
+                config,
+                agent_option,
+                prompt,
+                completion,
+                result,
+                callback,
+                err_msg,
+                start_time_ms,
+                end_time_ms
            )
 
-            callback = completion.usage
-            result = completion.choices[0].message.content
-    except Exception as e:
-        err_msg = e
-
-        raise gr.Error(e)
-    finally:
-        end_time_ms = round(time.time() * 1000)
-
-        trace_wandb(
-            config,
-            agent_option,
-            prompt,
-            completion,
-            result,
-            callback,
-            err_msg,
-            start_time_ms,
-            end_time_ms
-        )
-
-    return result
+            del os.environ["OPENAI_API_KEY"]
+
+        return result
 
 gr.close_all()
 
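The change serializes each request behind a module-level threading.Lock: the user-supplied key is written to os.environ only while the lock is held and is deleted again in the finally block, so concurrent Gradio requests cannot read or overwrite one another's keys. A minimal standalone sketch of that pattern (not the app's code; with_api_key is a hypothetical helper name):

import os
import threading

lock = threading.Lock()

def with_api_key(api_key, fn):
    # Run fn() while OPENAI_API_KEY is set to api_key.
    # The env var is process-wide state shared by all threads, so the whole
    # call is serialized and the key is removed again before the lock is
    # released, even if fn() raises.
    with lock:
        os.environ["OPENAI_API_KEY"] = api_key
        try:
            return fn()
        finally:
            del os.environ["OPENAI_API_KEY"]

if __name__ == "__main__":
    # Toy usage: each "request" only ever sees its own key.
    print(with_api_key("sk-demo", lambda: os.environ["OPENAI_API_KEY"]))

A lock-free alternative, assuming the agent libraries also accept an explicit key, would be to pass the key directly to the client (for example OpenAI(api_key=openai_api_key)) instead of mutating process-wide environment state.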