AWEsumCare-Demo / service_provider_config.py
import os

from dotenv import load_dotenv
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.llms.openai import OpenAI
from llama_index.llms.azure_openai import AzureOpenAI
from llama_index.embeddings.azure_openai import AzureOpenAIEmbedding

from schemas import ServiceProvider, ChatbotVersion

load_dotenv()

def get_service_provider_config(service_provider: ServiceProvider, model_name: str = ChatbotVersion.CHATGPT_35.value):
    if service_provider == ServiceProvider.AZURE:
        return get_azure_openai_config(model_name=model_name)
    if service_provider == ServiceProvider.OPENAI:
        llm = OpenAI(model=model_name)
        embed_model = OpenAIEmbedding()
        return llm, embed_model
    raise ValueError(f"Unsupported service provider: {service_provider}")

# The engine name needs to be the same as the deployment name in Azure.
def get_azure_openai_config(model_name: str):
    api_key = os.getenv("AZURE_OPENAI_API_KEY")
    azure_endpoint = "https://awesumcare.openai.azure.com/"
    api_version = "2024-10-01-preview"

    llm = AzureOpenAI(
        engine=model_name,
        model=model_name,
        api_key=api_key,
        azure_endpoint=azure_endpoint,
        api_version=api_version,
    )

    # You need to deploy your own embedding model as well as your own chat completion model.
    embed_model = AzureOpenAIEmbedding(
        deployment_name="text-embedding-ada-002",
        api_key=api_key,
        azure_endpoint=azure_endpoint,
        api_version=api_version,
    )

    return llm, embed_model
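

# A minimal usage sketch, not part of the original module: it assumes the
# llama-index 0.10+ global Settings object, that AZURE_OPENAI_API_KEY is set
# in the environment (or .env), and that ServiceProvider / ChatbotVersion in
# schemas.py expose the members referenced below.
if __name__ == "__main__":
    from llama_index.core import Settings

    # Build the LLM and embedding model for the chosen provider, then register
    # them as the global defaults used by llama-index components.
    llm, embed_model = get_service_provider_config(
        ServiceProvider.AZURE, model_name=ChatbotVersion.CHATGPT_35.value
    )
    Settings.llm = llm
    Settings.embed_model = embed_model
    print(type(llm).__name__, type(embed_model).__name__)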