Spaces:
Running
on
Zero
Running
on
Zero
ai-forever
committed on
Commit
•
27539d2
1
Parent(s):
e74c6f7
max_tokens=128 by default
Browse files
- src/gigachat.py +3 -3
src/gigachat.py
CHANGED
@@ -64,7 +64,7 @@ def get_response(
|
|
64 |
n: int = 1,
|
65 |
fuse_key_word: Optional[str] = None,
|
66 |
use_giga_censor: bool = False,
|
67 |
-
max_tokens: int =
|
68 |
) -> requests.Response:
|
69 |
"""
|
70 |
Send a text generation request to the API.
|
@@ -125,7 +125,7 @@ def get_response(
|
|
125 |
def giga_generate(
|
126 |
prompt: str,
|
127 |
model_version: str = "GigaChat-Max",
|
128 |
-
max_tokens: int =
|
129 |
) -> str:
|
130 |
"""
|
131 |
Generate text using the GigaChat model.
|
@@ -142,7 +142,7 @@ def giga_generate(
|
|
142 |
prompt,
|
143 |
model_version,
|
144 |
use_giga_censor=False,
|
145 |
-
max_tokens=
|
146 |
)
|
147 |
response_dict = response.json()
|
148 |
|
|
|
64 |
n: int = 1,
|
65 |
fuse_key_word: Optional[str] = None,
|
66 |
use_giga_censor: bool = False,
|
67 |
+
max_tokens: int = 128,
|
68 |
) -> requests.Response:
|
69 |
"""
|
70 |
Send a text generation request to the API.
|
|
|
125 |
def giga_generate(
|
126 |
prompt: str,
|
127 |
model_version: str = "GigaChat-Max",
|
128 |
+
max_tokens: int = 128
|
129 |
) -> str:
|
130 |
"""
|
131 |
Generate text using the GigaChat model.
|
|
|
142 |
prompt,
|
143 |
model_version,
|
144 |
use_giga_censor=False,
|
145 |
+
max_tokens=max_tokens,
|
146 |
)
|
147 |
response_dict = response.json()
|
148 |
|