:zap: [Enhance] Use nous-mixtral-8x7b as default model
- README.md +2 -2
- apis/chat_api.py +2 -2
- constants/models.py +1 -1
- examples/chat_with_openai.py +1 -1
- examples/chat_with_post.py +1 -1
- messagers/message_composer.py +1 -1
- messagers/message_outputer.py +1 -1
- messagers/token_checker.py +1 -1
- networks/huggingchat_streamer.py +2 -2
- networks/huggingface_streamer.py +1 -1
README.md
CHANGED
@@ -79,7 +79,7 @@ api_key = "hf_xxxxxxxxxxxxxxxx"
 
 client = OpenAI(base_url=base_url, api_key=api_key)
 response = client.chat.completions.create(
-    model="mixtral-8x7b",
+    model="nous-mixtral-8x7b",
     messages=[
         {
             "role": "user",
@@ -118,7 +118,7 @@ api_key = "hf_xxxxxxxxxxxxxxxx"
 
 requests_headers = {}
 requests_payload = {
-    "model": "mixtral-8x7b",
+    "model": "nous-mixtral-8x7b",
     "messages": [
         {
             "role": "user",
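A quick way to exercise the updated README snippet against a running instance is sketched below; the base URL (local host/port plus `/v1`) and the streaming loop are assumptions about a local deployment, not part of this commit.

```python
# Hedged smoke test for the new default alias; base_url is an assumed local
# deployment (examples/chat_with_post.py uses this host/port).
from openai import OpenAI

client = OpenAI(base_url="http://127.0.0.1:23333/v1", api_key="sk-xxxxx")
response = client.chat.completions.create(
    model="nous-mixtral-8x7b",  # alias promoted to default in this commit
    messages=[{"role": "user", "content": "Hello, who are you?"}],
    stream=True,
)
for chunk in response:
    delta = chunk.choices[0].delta
    if delta.content:
        print(delta.content, end="", flush=True)
```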
apis/chat_api.py
CHANGED
@@ -59,8 +59,8 @@ class ChatAPIApp:
 
 class ChatCompletionsPostItem(BaseModel):
     model: str = Field(
-        default="mixtral-8x7b",
-        description="(str) `mixtral-8x7b`",
+        default="nous-mixtral-8x7b",
+        description="(str) `nous-mixtral-8x7b`",
     )
     messages: list = Field(
         default=[{"role": "user", "content": "Hello, who are you?"}],
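Because the Pydantic field default changed, a request body that omits `model` now validates to the new alias. A standalone illustration that mirrors the fields in the diff (it does not import the project's module):

```python
# Standalone sketch of the updated request schema; mirrors the diff above.
from pydantic import BaseModel, Field


class ChatCompletionsPostItem(BaseModel):
    model: str = Field(
        default="nous-mixtral-8x7b",
        description="(str) `nous-mixtral-8x7b`",
    )
    messages: list = Field(
        default=[{"role": "user", "content": "Hello, who are you?"}],
    )


item = ChatCompletionsPostItem(messages=[{"role": "user", "content": "Hi"}])
print(item.model)  # -> "nous-mixtral-8x7b"
```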
constants/models.py
CHANGED
@@ -7,7 +7,7 @@ MODEL_MAP = {
     "command-r-plus": "CohereForAI/c4ai-command-r-plus",
     "llama3-70b": "meta-llama/Meta-Llama-3-70B-Instruct",
     "zephyr-141b": "HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
-    "default": "
+    "default": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
 }
 
 AVAILABLE_MODELS = list(MODEL_MAP.keys())
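The `default` entry now points at the Nous Hermes 2 Mixtral DPO repo. A sketch of how an alias lookup with a fallback to that entry might behave; `resolve()` is an illustrative helper, not a function from `constants/models.py`:

```python
# Illustrative alias resolution against MODEL_MAP (trimmed to the entries
# visible in this diff); resolve() is a hypothetical helper, not project code.
MODEL_MAP = {
    "command-r-plus": "CohereForAI/c4ai-command-r-plus",
    "llama3-70b": "meta-llama/Meta-Llama-3-70B-Instruct",
    "zephyr-141b": "HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
    "default": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
}
AVAILABLE_MODELS = list(MODEL_MAP.keys())


def resolve(model: str) -> str:
    # unknown aliases fall back to the "default" repo id
    return MODEL_MAP.get(model, MODEL_MAP["default"])


print(resolve("llama3-70b"))     # -> meta-llama/Meta-Llama-3-70B-Instruct
print(resolve("no-such-alias"))  # -> NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO
```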
examples/chat_with_openai.py
CHANGED
@@ -6,7 +6,7 @@ api_key = "sk-xxxxx"
 
 client = OpenAI(base_url=base_url, api_key=api_key)
 response = client.chat.completions.create(
-    model="mixtral-8x7b",
+    model="nous-mixtral-8x7b",
    messages=[
        {
            "role": "user",
examples/chat_with_post.py
CHANGED
@@ -8,7 +8,7 @@ chat_api = "http://127.0.0.1:23333"
 api_key = "sk-xxxxx"
 requests_headers = {}
 requests_payload = {
-    "model": "mixtral-8x7b",
+    "model": "nous-mixtral-8x7b",
     "messages": [
         {
             "role": "user",
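To send the updated payload, something like the following may work; the `/chat/completions` path, the `stream` flag, and the line-by-line read are assumptions about the local server that are not shown in this diff:

```python
# Hedged sketch of posting the payload from examples/chat_with_post.py;
# the endpoint path and streaming response format are assumptions.
import requests

chat_api = "http://127.0.0.1:23333"
requests_headers = {}
requests_payload = {
    "model": "nous-mixtral-8x7b",
    "messages": [{"role": "user", "content": "Hello, who are you?"}],
    "stream": True,
}

with requests.post(
    f"{chat_api}/chat/completions",
    headers=requests_headers,
    json=requests_payload,
    stream=True,
) as response:
    for line in response.iter_lines():
        if line:
            print(line.decode("utf-8"))
```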
messagers/message_composer.py
CHANGED
@@ -12,7 +12,7 @@ class MessageComposer:
         if model in AVAILABLE_MODELS:
             self.model = model
         else:
-            self.model = "mixtral-8x7b"
+            self.model = "nous-mixtral-8x7b"
         self.model_fullname = MODEL_MAP[self.model]
         self.system_roles = ["system"]
         self.inst_roles = ["user", "system", "inst"]
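The same fallback pattern recurs in `TokenChecker` and both streamer classes below: any alias outside the known list now silently becomes `nous-mixtral-8x7b`. A minimal standalone illustration:

```python
# Standalone illustration of the fallback; AVAILABLE_MODELS here is only a
# stand-in for the project's alias list, it just needs to contain the default.
AVAILABLE_MODELS = ["command-r-plus", "llama3-70b", "zephyr-141b", "nous-mixtral-8x7b"]


def pick_model(model: str) -> str:
    if model in AVAILABLE_MODELS:
        return model
    # unknown aliases fall back to the new default
    return "nous-mixtral-8x7b"


print(pick_model("llama3-70b"))     # -> "llama3-70b"
print(pick_model("no-such-alias"))  # -> "nous-mixtral-8x7b"
```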
messagers/message_outputer.py
CHANGED
@@ -7,7 +7,7 @@ class OpenaiStreamOutputer:
     * https://platform.openai.com/docs/api-reference/chat/create
     """
 
-    def __init__(self, owned_by="huggingface", model="mixtral-8x7b"):
+    def __init__(self, owned_by="huggingface", model="nous-mixtral-8x7b"):
         self.default_data = {
             "created": 1700000000,
             "id": f"chatcmpl-{owned_by}",
messagers/token_checker.py
CHANGED
@@ -11,7 +11,7 @@ class TokenChecker:
         if model in MODEL_MAP.keys():
             self.model = model
         else:
-            self.model = "mixtral-8x7b"
+            self.model = "nous-mixtral-8x7b"
 
         self.model_fullname = MODEL_MAP[self.model]
 
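`TokenChecker`'s internals are not shown in this diff; assuming it counts tokens with the tokenizer of the resolved repo id, a hedged sketch using the `transformers` tokenizer API:

```python
# Hedged sketch only: TokenChecker's real implementation is not part of this
# diff; this just counts prompt tokens against the resolved repo id.
from transformers import AutoTokenizer

model_fullname = "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO"
tokenizer = AutoTokenizer.from_pretrained(model_fullname)

prompt = "Hello, who are you?"
print(len(tokenizer.encode(prompt)), "tokens for", model_fullname)
```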
networks/huggingchat_streamer.py
CHANGED
@@ -20,7 +20,7 @@ class HuggingchatRequester:
         if model in MODEL_MAP.keys():
             self.model = model
         else:
-            self.model = "mixtral-8x7b"
+            self.model = "nous-mixtral-8x7b"
         self.model_fullname = MODEL_MAP[self.model]
 
     def get_hf_chat_id(self):
@@ -208,7 +208,7 @@ class HuggingchatStreamer:
         if model in MODEL_MAP.keys():
             self.model = model
         else:
-            self.model = "mixtral-8x7b"
+            self.model = "nous-mixtral-8x7b"
         self.model_fullname = MODEL_MAP[self.model]
         self.message_outputer = OpenaiStreamOutputer(model=self.model)
 
networks/huggingface_streamer.py
CHANGED
@@ -14,7 +14,7 @@ class HuggingfaceStreamer:
         if model in MODEL_MAP.keys():
             self.model = model
         else:
-            self.model = "mixtral-8x7b"
+            self.model = "nous-mixtral-8x7b"
         self.model_fullname = MODEL_MAP[self.model]
         self.message_outputer = OpenaiStreamOutputer(model=self.model)
 