kwabs22 committed on
Commit
2877fe7
1 Parent(s): 7e3fb58

more buttons

Browse files
Files changed (1) hide show
  1. app.py +5 -34
app.py CHANGED
@@ -3,38 +3,6 @@ import random
3
  import subprocess
4
  import time
5
 
6
- """
7
- def generate_response(user_message): #Figure Out the parameters later and find a way to get the ram usage
8
- cmd = [
9
- "/app/llama.cpp/main", # Path to the executable
10
- "-m", "/app/llama.cpp/models/stablelm-2-zephyr-1_6b-Q4_0.gguf",
11
- "-p", user_message,
12
- "-n", "400",
13
- "-e"
14
- ]
15
-
16
- # Start the subprocess
17
- process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, bufsize=1)
18
-
19
- start_time = time.time()
20
- alllines = ""
21
-
22
- # Yield each line of output as it becomes available
23
- for line in process.stdout:
24
- alllines += " " + line
25
- elapsed_time = time.time() - start_time # Calculate elapsed time
26
- print(line)
27
- yield f"{alllines} \n\n [Inference time: {elapsed_time:.2f} seconds]"
28
-
29
- # Wait for the subprocess to finish if it hasn't already
30
- process.wait()
31
-
32
- # Check for any errors
33
- if process.returncode != 0:
34
- error_message = process.stderr.read()
35
- print(f"Error: {error_message}")
36
- """
37
-
38
  def generate_response(user_message): #generate_response_token_by_token
39
  cmd = [
40
  "/app/llama.cpp/main", # Path to the executable
@@ -84,6 +52,7 @@ def custom_generate_response(cust_user_message):
84
  CustomPrompts = [
85
  "Write a Class Diagram based on the following text:",
86
  "Write a Pydot code based on the following text:",
 
87
  ]
88
 
89
  with gr.Blocks() as iface:
@@ -92,17 +61,19 @@ with gr.Blocks() as iface:
92
  inputs=gr.Textbox(lines=2, placeholder="Type your message here..."),
93
  outputs="text",
94
  title="Stable LM 2 Zephyr (1.6b) LLama.cpp Interface Test",
95
- description="No Message History for now - Enter your message and get a response. (One sentence every 20s)",
96
  flagging_dir="/usr/src/app/flagged",
97
  )
98
  #gr.Interface(fn=generate_response_token_by_token, inputs=gr.Textbox(lines=2, placeholder='Type prompt here...'), outputs="text", description="More Responsive streaming test")
99
  with gr.Group():
100
- gr.HTML("Test for wrapping generator (20 seconds a piece of the response)")
101
  MainOutput = gr.TextArea(placeholder='Output will show here')
102
  CustomButtonInput = gr.TextArea(lines=1, placeholder='Prompt goes here')
103
  CustomButtonClassDiagram = gr.Button(CustomPrompts[0])
104
  CustomButtonPydotcode = gr.Button(CustomPrompts[1])
 
105
  CustomButtonClassDiagram.click(custom_generate_response, inputs=[CustomButtonInput], outputs=MainOutput)
106
  CustomButtonPydotcode.click(custom_generate_response, inputs=[CustomButtonInput], outputs=MainOutput)
 
107
 
108
  iface.queue().launch(server_name="0.0.0.0", share=True)
 
3
  import subprocess
4
  import time
5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  def generate_response(user_message): #generate_response_token_by_token
7
  cmd = [
8
  "/app/llama.cpp/main", # Path to the executable
 
52
  CustomPrompts = [
53
  "Write a Class Diagram based on the following text:",
54
  "Write a Pydot code based on the following text:",
55
+ "Describe what a standard happy scene in any movie would be planned in great detail, based on the following text:",
56
  ]
57
 
58
  with gr.Blocks() as iface:
 
61
  inputs=gr.Textbox(lines=2, placeholder="Type your message here..."),
62
  outputs="text",
63
  title="Stable LM 2 Zephyr (1.6b) LLama.cpp Interface Test",
64
+ description="No Message History for now - Enter your message and get a response.",
65
  flagging_dir="/usr/src/app/flagged",
66
  )
67
  #gr.Interface(fn=generate_response_token_by_token, inputs=gr.Textbox(lines=2, placeholder='Type prompt here...'), outputs="text", description="More Responsive streaming test")
68
  with gr.Group():
69
+ gr.HTML("Test for wrapping generator (Instead of buttons tabs and dropdowns?)")
70
  MainOutput = gr.TextArea(placeholder='Output will show here')
71
  CustomButtonInput = gr.TextArea(lines=1, placeholder='Prompt goes here')
72
  CustomButtonClassDiagram = gr.Button(CustomPrompts[0])
73
  CustomButtonPydotcode = gr.Button(CustomPrompts[1])
74
+ CustomButtonHappyMovieScene = gr.Button(CustomPrompts[2])
75
  CustomButtonClassDiagram.click(custom_generate_response, inputs=[CustomButtonInput], outputs=MainOutput)
76
  CustomButtonPydotcode.click(custom_generate_response, inputs=[CustomButtonInput], outputs=MainOutput)
77
+ CustomButtonHappyMovieScene.click(custom_generate_response, inputs=[CustomButtonInput], outputs=MainOutput)
78
 
79
  iface.queue().launch(server_name="0.0.0.0", share=True)