freddyaboulton (HF staff) committed
Commit d2daf95 · Parent(s): a3b4442

Disable SSR mode

Launch the demo with ssr_mode=False, add a guarded dotenv fallback so E2B_API_KEY and HF_TOKEN can be loaded from a local .env file when the app runs outside a Space, and reformat app.py.
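The launch change itself is a single keyword argument; as a minimal sketch (assuming Gradio 5.x, where Blocks.launch() accepts an ssr_mode flag), the launch call at the bottom of app.py becomes:

    demo.launch(ssr_mode=False)  # opt out of server-side rendering; the UI renders in the browser as before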

Files changed (1):
  1. app.py (+52, -32)
app.py CHANGED
@@ -1,15 +1,28 @@
 import os
 import gradio as gr
+from gradio.utils import get_space
 from huggingface_hub import InferenceClient
 from e2b_code_interpreter import Sandbox
 from pathlib import Path
 from transformers import AutoTokenizer

-from utils import run_interactive_notebook, create_base_notebook, update_notebook_display
+if not get_space():
+    try:
+        from dotenv import load_dotenv

+        load_dotenv()
+    except (ImportError, ModuleNotFoundError):
+        pass

-E2B_API_KEY = os.environ['E2B_API_KEY']
-HF_TOKEN = os.environ['HF_TOKEN']
+
+from utils import (
+    run_interactive_notebook,
+    create_base_notebook,
+    update_notebook_display,
+)
+
+E2B_API_KEY = os.environ["E2B_API_KEY"]
+HF_TOKEN = os.environ["HF_TOKEN"]
 DEFAULT_MAX_TOKENS = 512
 DEFAULT_SYSTEM_PROMPT = """You are a code assistant with access to a ipython interpreter.
 You solve tasks step-by-step and rely on code execution results.
@@ -31,11 +44,13 @@ List of available files:
 {}"""


-def execute_jupyter_agent(sytem_prompt, user_input, max_new_tokens, model,files, message_history):
+def execute_jupyter_agent(
+    sytem_prompt, user_input, max_new_tokens, model, files, message_history
+):
     client = InferenceClient(api_key=HF_TOKEN)

     tokenizer = AutoTokenizer.from_pretrained(model)
-    #model = "meta-llama/Llama-3.1-8B-Instruct"
+    # model = "meta-llama/Llama-3.1-8B-Instruct"

     sbx = Sandbox(api_key=E2B_API_KEY)

@@ -48,16 +63,21 @@ def execute_jupyter_agent(sytem_prompt, user_input, max_new_tokens, model,files,
         sbx.files.write(filpath.name, file)
         filenames.append(filpath.name)

-
-
     # Initialize message_history if it doesn't exist
-    if len(message_history)==0:
-        message_history.append({"role": "system", "content": sytem_prompt.format("- " + "\n- ".join(filenames))})
+    if len(message_history) == 0:
+        message_history.append(
+            {
+                "role": "system",
+                "content": sytem_prompt.format("- " + "\n- ".join(filenames)),
+            }
+        )
     message_history.append({"role": "user", "content": user_input})

     print("history:", message_history)

-    for notebook_html, messages in run_interactive_notebook(client, model, tokenizer, message_history, sbx, max_new_tokens=max_new_tokens):
+    for notebook_html, messages in run_interactive_notebook(
+        client, model, tokenizer, message_history, sbx, max_new_tokens=max_new_tokens
+    ):
         message_history = messages
         yield notebook_html, message_history

@@ -66,6 +86,7 @@ def clear(state):
     state = []
     return update_notebook_display(create_base_notebook([])[0]), state

+
 css = """
 #component-0 {
     height: 100vh;
@@ -86,11 +107,13 @@ css = """
 # Create the interface
 with gr.Blocks() as demo:
     state = gr.State(value=[])
-
+
     html_output = gr.HTML(value=update_notebook_display(create_base_notebook([])[0]))
-
-    user_input = gr.Textbox(value="Solve the Lotka-Volterra equation and plot the results.", lines=3)
-
+
+    user_input = gr.Textbox(
+        value="Solve the Lotka-Volterra equation and plot the results.", lines=3
+    )
+
     with gr.Row():
         generate_btn = gr.Button("Let's go!")
         clear_btn = gr.Button("Clear")
@@ -98,13 +121,12 @@ with gr.Blocks() as demo:
     with gr.Accordion("Upload files", open=False):
         files = gr.File(label="Upload files to use", file_count="multiple")

-
    with gr.Accordion("Advanced Settings", open=False):
        system_input = gr.Textbox(
            label="System Prompt",
            value=DEFAULT_SYSTEM_PROMPT,
            elem_classes="input-box",
-            lines=8
+            lines=8,
        )
        with gr.Row():
            max_tokens = gr.Number(
@@ -113,26 +135,24 @@ with gr.Blocks() as demo:
                minimum=128,
                maximum=2048,
                step=8,
-                interactive=True
+                interactive=True,
+            )
+
+            model = gr.Dropdown(
+                value="meta-llama/Llama-3.1-8B-Instruct",
+                choices=[
+                    "meta-llama/Llama-3.2-3B-Instruct",
+                    "meta-llama/Llama-3.1-8B-Instruct",
+                    "meta-llama/Llama-3.1-70B-Instruct",
+                ],
            )
-
-            model = gr.Dropdown(value="meta-llama/Llama-3.1-8B-Instruct",
-                choices=[
-                    "meta-llama/Llama-3.2-3B-Instruct",
-                    "meta-llama/Llama-3.1-8B-Instruct",
-                    "meta-llama/Llama-3.1-70B-Instruct"]
-            )
-
+
    generate_btn.click(
        fn=execute_jupyter_agent,
        inputs=[system_input, user_input, max_tokens, model, files, state],
-        outputs=[html_output, state]
+        outputs=[html_output, state],
    )

-    clear_btn.click(
-        fn=clear,
-        inputs=[state],
-        outputs=[html_output, state]
-    )
+    clear_btn.click(fn=clear, inputs=[state], outputs=[html_output, state])

-demo.launch()
+demo.launch(ssr_mode=False)
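For local development, the guarded import means the app can be started outside a Space without exporting secrets by hand. A minimal sketch of the new startup path, assuming python-dotenv is installed and a .env file next to app.py defines E2B_API_KEY and HF_TOKEN:

import os

from gradio.utils import get_space

if not get_space():  # only when running outside a Hugging Face Space
    try:
        from dotenv import load_dotenv  # assumption: python-dotenv is available locally

        load_dotenv()  # reads E2B_API_KEY / HF_TOKEN from ./.env into os.environ
    except (ImportError, ModuleNotFoundError):
        pass  # fall back to variables already exported in the shell

E2B_API_KEY = os.environ["E2B_API_KEY"]  # still raises KeyError if the key is missing
HF_TOKEN = os.environ["HF_TOKEN"]

On a Space, gradio.utils.get_space() is truthy, so the dotenv branch is skipped and the two variables are expected to come from the Space's configured secrets.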