from typing import Any, List, Mapping, Optional

import g4f
from g4f.Provider import (
    Ails,
    You,
    Bing,
    Yqcloud,
    Theb,
    Aichat,
    Bard,
    Vercel,
    Forefront,
    Lockchat,
    Liaobots,
    H2o,
    ChatgptLogin,
    DeepAi,
    GetGpt
)
from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.llms.base import LLM

# Map provider names to their g4f provider classes so a provider can be
# selected by string.
provider_dict = {
    'Ails': Ails,
    'You': You,
    'Bing': Bing,
    'Yqcloud': Yqcloud,
    'Theb': Theb,
    'Aichat': Aichat,
    'Bard': Bard,
    'Vercel': Vercel,
    'Forefront': Forefront,
    'Lockchat': Lockchat,
    'Liaobots': Liaobots,
    'H2o': H2o,
    'ChatgptLogin': ChatgptLogin,
    'DeepAi': DeepAi,
    'GetGpt': GetGpt
}
class CustomLLM(LLM):
    """LangChain-compatible LLM that routes completions through g4f."""

    model_name: str = "gpt-3.5-turbo"
    provider_name: str = "GetGpt"

    @property
    def _llm_type(self) -> str:
        return "custom"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        if stop is not None:
            raise ValueError("stop kwargs are not permitted.")
        # Send the prompt as a single user message and return the completion text.
        bot_msg = g4f.ChatCompletion.create(
            model=self.model_name,
            provider=provider_dict[self.provider_name],
            messages=[{"role": "user", "content": prompt}],
            stream=False,
        )
        return bot_msg

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {"model_name": self.model_name, "provider_name": self.provider_name}