kenken999 committed
Commit 1afbeb8 • 1 Parent(s): 354f15c
Files changed (31)
  1. mysite/asgi copy 2.py +59 -0
  2. mysite/asgi.py +2 -1
  3. mysite/gradio_config.py +45 -10
  4. mysite/interpreter_config.py +28 -0
  5. mysite/utilities.py +1 -0
  6. workspace/utils/db.py → routers/chat/__init__.py +0 -0
  7. routers/chat/chat.py +180 -0
  8. routers/chat/demo3.py +18 -0
  9. workspace/{database.db → backup/database.db} +0 -0
  10. workspace/{db.py → backup/db.py} +0 -0
  11. workspace/{hello.txt → backup/hello.txt} +0 -0
  12. workspace/{index.html → backup/index.html} +0 -0
  13. workspace/{init_db → backup/init_db} +0 -0
  14. workspace/{main.py → backup/main.py} +0 -0
  15. workspace/{mydatabase → backup/mydatabase} +0 -0
  16. workspace/{mydatabase.duckdb → backup/mydatabase.duckdb} +0 -0
  17. workspace/{mydatabase.wal → backup/mydatabase.wal} +0 -0
  18. workspace/{mydb.duckdb → backup/mydb.duckdb} +0 -0
  19. workspace/{new_file.py → backup/new_file.py} +0 -0
  20. workspace/{sample.csv → backup/sample.csv} +0 -0
  21. workspace/{sample.duckdb → backup/sample.duckdb} +0 -0
  22. workspace/{sample.html → backup/sample.html} +0 -0
  23. workspace/{test.csv → backup/test.csv} +0 -0
  24. workspace/{test.txt → backup/test.txt} +0 -0
  25. workspace/{test_file.py → backup/test_file.py} +0 -0
  26. workspace/{test_file.txt → backup/test_file.txt} +0 -0
  27. workspace/{utils/groq_api.py → backup/utils/db.py} +0 -0
  28. workspace/backup/utils/groq_api.py +0 -0
  29. workspace/{workspace → backup/workspace}/index.html +0 -0
  30. workspace/{workspace → backup/workspace}/prompts.db +0 -0
  31. workspace/{your_module.py → backup/your_module.py} +0 -0
mysite/asgi copy 2.py ADDED
@@ -0,0 +1,59 @@
+ import os
+ from django.core.asgi import get_asgi_application
+ from fastapi import FastAPI, Request
+ from fastapi.staticfiles import StaticFiles
+ from fastapi.templating import Jinja2Templates
+
+ from starlette.middleware.cors import CORSMiddleware
+ import logging
+ import gradio as gr
+ from mysite.gradio_config import setup_gradio_interfaces
+ from mysite.webhook import setup_webhook_routes
+ from mysite.database import setup_database_routes
+ from mysite.asgi_config import init_django_app
+
+ # Logger configuration
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+ file_handler = logging.FileHandler("app.log")
+ file_handler.setLevel(logging.INFO)
+ formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+ file_handler.setFormatter(formatter)
+ logger.addHandler(file_handler)
+
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
+ application = get_asgi_application()
+
+ app = FastAPI()
+
+ # Initialize the Django application
+ init_django_app(app, application)
+
+ # Middleware configuration
+ app.add_middleware(
+     CORSMiddleware,
+     allow_origins=["*"],
+     allow_credentials=True,
+     allow_methods=["*"],
+     allow_headers=["*"],
+ )
+
+ # Gradio interface setup
+ gradio_interfaces = setup_gradio_interfaces()
+
+ # Webhook route setup
+ setup_webhook_routes(app)
+
+ # Database route setup
+ setup_database_routes(app)
+
+ # Mount the Gradio app
+ app.mount("/static", StaticFiles(directory="static", html=True), name="static")
+ app = gr.mount_gradio_app(app, gradio_interfaces, "/")
+
+ # Directory that holds the template files
+ templates = Jinja2Templates(directory="static")
+
+ @app.get("/test")
+ def get_some_page(request: Request):
+     return templates.TemplateResponse("index.html", {"request": request})
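For local testing, this assembled FastAPI + Gradio app can be served by any ASGI server. A minimal sketch, assuming uvicorn is installed and the project root is the working directory (the helper file, module path mysite.asgi, and port 7860 are illustrative, not part of this commit):

    # run_dev.py (hypothetical helper, not part of this commit)
    import uvicorn

    if __name__ == "__main__":
        # serve the combined Django/FastAPI/Gradio app defined in mysite/asgi.py
        uvicorn.run("mysite.asgi:app", host="0.0.0.0", port=7860)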
mysite/asgi.py CHANGED
@@ -11,7 +11,8 @@ from mysite.gradio_config import setup_gradio_interfaces
  from mysite.webhook import setup_webhook_routes
  from mysite.database import setup_database_routes
  from mysite.asgi_config import init_django_app
- 
+ from interpreter import interpreter
+ import mysite.interpreter_config  # just importing this module applies the settings
  # Logger configuration
  logging.basicConfig(level=logging.INFO)
  logger = logging.getLogger(__name__)
mysite/gradio_config.py CHANGED
@@ -1,5 +1,43 @@
  import gradio as gr
  from mysite.utilities import chat_with_interpreter, completion, process_file
+ from interpreter import interpreter
+ import mysite.interpreter_config  # just importing this module applies the settings
+ import importlib
+ import os
+ import pkgutil
+ from routers.chat.chat import demo44 as demo4
+
+ def include_gradio_interfaces():
+     package_dir = "/home/user/app/routers"
+     gradio_interfaces = []
+     gradio_names = set()
+
+     for module_info in pkgutil.walk_packages([package_dir], "routers."):
+         sub_module_name = module_info.name
+         try:
+             print(f"Trying to import {sub_module_name}")
+             module = importlib.import_module(sub_module_name)
+             if hasattr(module, "gradio_interface"):
+                 print(f"Found gradio_interface in {sub_module_name}")
+                 interface_name = module_info.name.split(".")[-1]
+                 if interface_name not in gradio_names:
+                     gradio_interfaces.append(module.gradio_interface)
+                     gradio_names.add(interface_name)
+                 else:
+                     unique_name = f"{interface_name}_{len(gradio_names)}"
+                     gradio_interfaces.append(module.gradio_interface)
+                     gradio_names.add(unique_name)
+         except ModuleNotFoundError:
+             print(f"ModuleNotFoundError: {sub_module_name}")
+             pass
+         except Exception as e:
+             print(f"Failed to import {sub_module_name}: {e}")
+
+     print(f"Collected Gradio Interfaces: {gradio_names}")
+     return gradio_interfaces, list(gradio_names)
+
+
+
 
  def setup_gradio_interfaces():
      chat_interface = gr.ChatInterface(
@@ -16,15 +54,7 @@ def setup_gradio_interfaces():
      )
      chat_interface2.queue()
 
-     demo4 = gr.ChatInterface(
-         chat_with_interpreter,
-         additional_inputs=[
-             gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-             gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-             gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-             gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
-         ],
-     )
+
 
      democs = gr.Interface(
          fn=process_file,
@@ -40,6 +70,11 @@ def setup_gradio_interfaces():
      default_interfaces = [demo4, democs]
      default_names = ["demo4", "仕様書から作成"]
 
-     tabs = gr.TabbedInterface(default_interfaces, default_names)
+     gradio_interfaces, gradio_names = include_gradio_interfaces()
+
+     all_interfaces = default_interfaces + gradio_interfaces
+     all_names = default_names + gradio_names
+
+     tabs = gr.TabbedInterface(all_interfaces, all_names)
      tabs.queue()
      return tabs
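include_gradio_interfaces() treats any module under routers/ that exposes a module-level gradio_interface attribute as a pluggable tab. A minimal sketch of such a module (the file routers/hello.py and its greet function are hypothetical; the attribute name gradio_interface is the one the loader checks with hasattr):

    # routers/hello.py (hypothetical example of the plugin contract)
    import gradio as gr

    def greet(name: str) -> str:
        # trivial handler, only here to illustrate the contract
        return f"Hello, {name}!"

    # include_gradio_interfaces() picks this up and adds it as a tab
    gradio_interface = gr.Interface(fn=greet, inputs="text", outputs="text")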
mysite/interpreter_config.py ADDED
@@ -0,0 +1,28 @@
+ # interpreter_config.py
+
+ import os
+ from interpreter import interpreter
+
+ # Store and use the OpenAI-compatible API key via environment variables
+ interpreter.auto_run = True
+ interpreter.llm.model = "huggingface/meta-llama/Meta-Llama-3-8B-Instruct"
+ interpreter.llm.api_key = os.getenv("hf_token")
+ interpreter.llm.api_base = "https://api.groq.com/openai/v1"
+ interpreter.llm.api_key = os.getenv("api_key")
+ interpreter.llm.model = "Llama3-70b-8192"
+
+ # interpreter.llm.fp16 = False  # set to use FP32 explicitly
+ # interpreter --conversations
+ # Apply the LLM settings
+ interpreter.llm.context_window = 4096  # typical LLM context window size
+ interpreter.context_window = 4096  # typical LLM context window size
+
+ interpreter.llm.max_tokens = 3000  # maximum number of tokens processed per request
+ interpreter.max_tokens = 3000  # maximum number of tokens processed per request
+
+ interpreter.llm.max_output = 10000  # maximum number of output tokens
+ interpreter.max_output = 10000  # maximum number of output tokens
+
+ interpreter.conversation_history = True
+ interpreter.debug_mode = False
+ # interpreter.temperature = 0.7
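Because these assignments run at import time, importing the module once is enough to configure the shared interpreter, and the later Groq values overwrite the earlier Hugging Face model and key (plain attribute-assignment order). A minimal sketch, assuming the api_key environment variable is set as the file expects:

    # hypothetical sanity check, run from the project root
    import mysite.interpreter_config          # importing applies the settings
    from interpreter import interpreter

    print(interpreter.llm.model)              # "Llama3-70b-8192" (last assignment wins)
    print(interpreter.llm.api_base)           # "https://api.groq.com/openai/v1"
    print(interpreter.auto_run)               # True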
mysite/utilities.py CHANGED
@@ -5,6 +5,7 @@ import base64
  import subprocess
  import time
 
+
  def validate_signature(body: str, signature: str, secret: str) -> bool:
      hash = hmac.new(secret.encode("utf-8"), body.encode("utf-8"), hashlib.sha256).digest()
      expected_signature = base64.b64encode(hash).decode("utf-8")
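validate_signature() recomputes the HMAC-SHA256 of the raw body with the shared secret and base64-encodes it, the scheme used by LINE-style webhooks. A minimal round-trip sketch, assuming the function returns True when the computed and supplied signatures match:

    # hypothetical self-test for validate_signature
    import base64, hashlib, hmac
    from mysite.utilities import validate_signature

    secret = "channel-secret"            # placeholder secret
    body = '{"events": []}'              # raw request body as received
    digest = hmac.new(secret.encode("utf-8"), body.encode("utf-8"), hashlib.sha256).digest()
    signature = base64.b64encode(digest).decode("utf-8")

    assert validate_signature(body, signature, secret)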
workspace/utils/db.py → routers/chat/__init__.py RENAMED
File without changes
routers/chat/chat.py ADDED
@@ -0,0 +1,180 @@
+ import gradio as gr
+ from mysite.utilities import chat_with_interpreter, completion, process_file
+ from interpreter import interpreter
+ import mysite.interpreter_config  # just importing this module applies the settings
+ import duckdb
+ import base64
+ from io import BytesIO
+ from PIL import Image
+
+ def format_response(chunk, full_response):
+     # Message
+     if chunk["type"] == "message":
+         full_response += chunk.get("content", "")
+         if chunk.get("end", False):
+             full_response += "\n"
+
+     # Code
+     if chunk["type"] == "code":
+         if chunk.get("start", False):
+             full_response += "```python\n"
+         full_response += chunk.get("content", "").replace("`", "")
+         if chunk.get("end", False):
+             full_response += "\n```\n"
+
+     # Output
+     if chunk["type"] == "confirmation":
+         if chunk.get("start", False):
+             full_response += "```python\n"
+         full_response += chunk.get("content", {}).get("code", "")
+         if chunk.get("end", False):
+             full_response += "```\n"
+
+     # Console
+     if chunk["type"] == "console":
+         if chunk.get("start", False):
+             full_response += "```python\n"
+         if chunk.get("format", "") == "active_line":
+             console_content = chunk.get("content", "")
+             if console_content is None:
+                 full_response += "No output available on console."
+         if chunk.get("format", "") == "output":
+             console_content = chunk.get("content", "")
+             full_response += console_content
+         if chunk.get("end", False):
+             full_response += "\n```\n"
+
+     # Image
+     if chunk["type"] == "image":
+         if chunk.get("start", False) or chunk.get("end", False):
+             full_response += "\n"
+         else:
+             image_format = chunk.get("format", "")
+             if image_format == "base64.png":
+                 image_content = chunk.get("content", "")
+                 if image_content:
+                     image = Image.open(BytesIO(base64.b64decode(image_content)))
+                     new_image = Image.new("RGB", image.size, "white")
+                     new_image.paste(image, mask=image.split()[3])
+                     buffered = BytesIO()
+                     new_image.save(buffered, format="PNG")
+                     img_str = base64.b64encode(buffered.getvalue()).decode()
+                     full_response += f"![Image](data:image/png;base64,{img_str})\n"
+
+     return full_response
+
+
+ # Set the environment variable.
+ def chat_with_interpreter(
+     message, history, a=None, b=None, c=None, d=None
+ ):  # , openai_api_key):
+     # Set the API key for the interpreter
+     # interpreter.llm.api_key = openai_api_key
+     if message == "reset":
+         interpreter.reset()
+         return "Interpreter reset", history
+     full_response = ""
+     # add_conversation(history, 20)
+     user_entry = {"role": "user", "type": "message", "content": message}
+     # messages.append(user_entry)
+     # Call interpreter.chat and capture the result
+     # message = message + "\nシンタックスを確認してください。"
+     # result = interpreter.chat(message)
+     for chunk in interpreter.chat(message, display=False, stream=True):
+         # print(chunk)
+         # output = '\n'.join(item['content'] for item in result if 'content' in item)
+         full_response = format_response(chunk, full_response)
+         yield full_response  # chunk.get("content", "")
+
+     # Extract the 'content' field from all elements in the result
+     """
+     if isinstance(result, list):
+         for item in result:
+             if 'content' in item:
+                 #yield item['content']#, history
+                 output = '\n'.join(item['content'] for item in result if 'content' in item)
+     else:
+         #yield str(result)#, history
+         output = str(result)
+     """
+
+     age = 28
+     con = duckdb.connect(database="./workspace/sample.duckdb")
+     con.execute(
+         """
+         CREATE SEQUENCE IF NOT EXISTS sample_id_seq START 1;
+         CREATE TABLE IF NOT EXISTS samples (
+             id INTEGER DEFAULT nextval('sample_id_seq'),
+             name VARCHAR,
+             age INTEGER,
+             PRIMARY KEY(id)
+         );
+         """
+     )
+     cur = con.cursor()
+     con.execute("INSERT INTO samples (name, age) VALUES (?, ?)", (full_response, age))
+     con.execute("INSERT INTO samples (name, age) VALUES (?, ?)", (message, age))
+     # Export the data to a CSV file
+     con.execute("COPY samples TO 'sample.csv' (FORMAT CSV, HEADER)")
+     # Commit the data
+     con.commit()
+
+     # Select the data
+     cur = con.execute("SELECT * FROM samples")
+
+     # Fetch the results
+     res = cur.fetchall()
+     rows = ""
+     # Display the results
+     # Format the results as a string
+     rows = "\n".join([f"name: {row[0]}, age: {row[1]}" for row in res])
+
+     # Close the connection
+     con.close()
+     # print(cur.fetchall())
+     yield full_response + rows  # , history
+     return full_response, history
+
+ PLACEHOLDER = """
+ <div style="padding: 30px; text-align: center; display: flex; flex-direction: column; align-items: center;">
+     <img src="https://ysharma-dummy-chat-app.hf.space/file=/tmp/gradio/8e75e61cc9bab22b7ce3dec85ab0e6db1da5d107/Meta_lockup_positive%20primary_RGB.jpg" style="width: 80%; max-width: 550px; height: auto; opacity: 0.55; ">
+     <h1 style="font-size: 28px; margin-bottom: 2px; opacity: 0.55;">Meta llama3</h1>
+     <p style="font-size: 18px; margin-bottom: 2px; opacity: 0.65;">Ask me anything...</p>
+ </div>
+ """
+
+ chatbot = gr.Chatbot(height=650, placeholder=PLACEHOLDER, label="Gradio ChatInterface")
+
+
+ demo44 = gr.ChatInterface(
+     fn=chat_with_interpreter,
+     chatbot=chatbot,
+     fill_height=True,
+     additional_inputs_accordion=gr.Accordion(
+         label="⚙️ Parameters", open=False, render=False
+     ),
+     additional_inputs=[
+         gr.Slider(
+             minimum=0,
+             maximum=1,
+             step=0.1,
+             value=0.95,
+             label="Temperature",
+             render=False,
+         ),
+         gr.Slider(
+             minimum=128,
+             maximum=4096,
+             step=1,
+             value=512,
+             label="Max new tokens",
+             render=False,
+         ),
+     ],
+     # democs,
+     examples=[
+         ["HTMLのサンプルを作成して"],
+         [
+             "CUDA_VISIBLE_DEVICES=0 llamafactory-cli train examples/lora_single_gpu/llama3_lora_sft.yaml"
+         ],
+     ],
+     cache_examples=False,
+ )
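Outside the FastAPI mount, demo44 can also be launched on its own for a quick manual test. A minimal sketch, assuming open-interpreter and the Groq credentials from mysite/interpreter_config.py are configured and ./workspace exists for the DuckDB file (the port is illustrative):

    # hypothetical standalone launch of the chat tab
    from routers.chat.chat import demo44

    if __name__ == "__main__":
        demo44.queue()                       # enable streamed responses
        demo44.launch(server_port=7860)      # http://localhost:7860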
routers/chat/demo3.py ADDED
@@ -0,0 +1,18 @@
+
+ import gradio as gr
+ from mysite.utilities import chat_with_interpreter, completion, process_file
+ from interpreter import interpreter
+ import mysite.interpreter_config  # just importing this module applies the settings
+ import mysite.interpreter_config
+
+
+
+ demo4 = gr.ChatInterface(
+     chat_with_interpreter,
+     additional_inputs=[
+         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
+         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+         gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
+     ],
+ )
workspace/{database.db → backup/database.db} RENAMED
File without changes
workspace/{db.py → backup/db.py} RENAMED
File without changes
workspace/{hello.txt → backup/hello.txt} RENAMED
File without changes
workspace/{index.html → backup/index.html} RENAMED
File without changes
workspace/{init_db → backup/init_db} RENAMED
File without changes
workspace/{main.py → backup/main.py} RENAMED
File without changes
workspace/{mydatabase → backup/mydatabase} RENAMED
File without changes
workspace/{mydatabase.duckdb → backup/mydatabase.duckdb} RENAMED
File without changes
workspace/{mydatabase.wal → backup/mydatabase.wal} RENAMED
File without changes
workspace/{mydb.duckdb → backup/mydb.duckdb} RENAMED
File without changes
workspace/{new_file.py → backup/new_file.py} RENAMED
File without changes
workspace/{sample.csv → backup/sample.csv} RENAMED
File without changes
workspace/{sample.duckdb → backup/sample.duckdb} RENAMED
File without changes
workspace/{sample.html → backup/sample.html} RENAMED
File without changes
workspace/{test.csv → backup/test.csv} RENAMED
File without changes
workspace/{test.txt → backup/test.txt} RENAMED
File without changes
workspace/{test_file.py → backup/test_file.py} RENAMED
File without changes
workspace/{test_file.txt → backup/test_file.txt} RENAMED
File without changes
workspace/{utils/groq_api.py → backup/utils/db.py} RENAMED
File without changes
workspace/backup/utils/groq_api.py ADDED
File without changes
workspace/{workspace → backup/workspace}/index.html RENAMED
File without changes
workspace/{workspace → backup/workspace}/prompts.db RENAMED
File without changes
workspace/{your_module.py → backup/your_module.py} RENAMED
File without changes