|
|
|
|
|
|
|
|
|
|
|
import os |
|
import re |
|
import uuid |
|
import time |
|
import json |
|
import asyncio |
|
import requests |
|
from pathlib import Path |
|
from datetime import datetime |
|
from dotenv import load_dotenv |
|
|
|
import chainlit as cl |
|
from chainlit import user_session |
|
|
|
from langchain import hub |
|
from langchain_openai import OpenAI |
|
from langchain.chains import LLMChain |
|
from langchain_core.prompts import PromptTemplate |
|
from langchain.memory.buffer import ConversationBufferMemory |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Load variables from a local .env file into the process environment.
load_dotenv()

# Credentials: the OpenAI key feeds the LLM client configured below; the
# Daysoff token authorises calls to the booking API.  Either may be None
# when the corresponding variable is unset.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

auth_token = os.environ.get("DAYSOFF_API_TOKEN")

# Daysoff booking-lookup endpoint (POSTed to by handle_message).
API_URL = "https://aivisions.no/data/daysoff/api/v1/booking/"
|
|
|
|
|
|
|
|
|
|
|
|
|
# System prompt for the customer-support persona.  The template consumes two
# variables: {chat_history} (prior turns) and {question} (current message).
# NOTE(review): "bokmΓ₯l" below looks like mojibake for "bokmål" — the string
# is sent to the model verbatim, so confirm and fix the file encoding.
daysoff_assistant_template = """

You are a customer support assistant for Daysoff kundeservice and help users retrieve booking information associated with their booking IDs.

By default, you respond using Norwegian bokmΓ₯l.

Provide a conversational answer.

This way you directly address the user's question in a manner that reflects the professionalism and warmth

of a customer support representative (female).

============================

Chat History: {chat_history}

Question: {question}

============================

Answer in Markdown:

"""

# Prompt object wired into the `prompt | llm` pipeline further down.
daysoff_assistant_prompt = PromptTemplate(

    input_variables=["chat_history", "question"],

    template=daysoff_assistant_template,

)
|
|
|
async def async_post_request(url, headers, data, timeout=30):
    """POST ``data`` as JSON to ``url`` without blocking the event loop.

    ``requests`` is synchronous, so the call is pushed onto a worker thread
    via ``asyncio.to_thread``.

    Args:
        url: Target endpoint.
        headers: HTTP headers dict (e.g. Authorization / Content-Type).
        data: JSON-serialisable request body.
        timeout: Seconds before the request is aborted.  The original code
            passed no timeout, which lets a stalled server hang the worker
            thread indefinitely; ``requests`` recommends always setting one.

    Returns:
        requests.Response: the completed response object.
    """
    return await asyncio.to_thread(
        requests.post, url, headers=headers, json=data, timeout=timeout
    )
|
|
|
@cl.set_starters
async def set_starters():
    """Return the starter suggestion cards shown on the welcome screen."""
    # (label, message, icon) triples — one per starter card.
    starter_specs = [
        (
            "Booking ID request",
            "Kan du gi meg info om en reservasjon?",
            "/public/booking_id.svg",
        ),
        (
            "Metric Space Self-Identity Framework",
            "Explain the Metric Space Self-Identity Framework like I'm five years old.",
            "/public/learn.svg",
        ),
        (
            "Python script for daily email reports",
            "Write a script to automate sending daily email reports in Python, and walk me through how I would set it up.",
            "/public/terminal.svg",
        ),
        (
            "Morning routine ideation",
            "Can you help me create a personalized Yoga/pranayama/meditation morning routine that would help increase my productivity throughout the day? Start by asking me about my current habits and what activities energize me in the morning.",
            "/public/idea.svg",
        ),
    ]
    return [
        cl.Starter(label=label, message=message, icon=icon)
        for label, message, icon in starter_specs
    ]
|
|
|
def create_output_path(user=None):
    """Create and return a unique per-session log directory.

    The directory is created under ``$HOME/logs`` (falling back to
    ``/home/user/logs``) and named ``<timestamp>--<user>--<uuid4>``, so
    concurrent sessions can never collide on the same path.

    Args:
        user: Optional user identifier embedded in the directory name;
            falls back to the literal ``'default'`` when falsy.

    Returns:
        pathlib.Path: the created directory.
    """
    # Second-resolution, filesystem-safe timestamp (no spaces or colons).
    # Equivalent to the old str(datetime).replace(...) dance, but direct.
    date = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    this_uuid = uuid.uuid4()

    base_path = os.environ.get('HOME', '/home/user')
    logs_path = Path(base_path) / "logs"

    path_out = logs_path / f"{date}--{user or 'default'}--{this_uuid}"
    path_out.mkdir(exist_ok=True, parents=True)

    return path_out
|
|
|
@cl.on_chat_start
def init_session():
    """Initialise per-user session state when a new chat begins."""
    session_defaults = {
        'chat_history': [],                     # running conversation log
        'path_out': create_output_path(),       # per-session log directory
        "session_created_at": datetime.now(),   # session start timestamp
    }
    for key, value in session_defaults.items():
        cl.user_session.set(key, value)
|
|
|
|
|
|
|
|
|
|
|
|
|
# Completion-model client shared by all sessions.  The non-chat `OpenAI`
# class matches the completions-style "gpt-3.5-turbo-instruct" model.
llm = OpenAI(

    model="gpt-3.5-turbo-instruct",

    temperature=0.7,  # mildly creative, support-desk tone

    openai_api_key=OPENAI_API_KEY,

    max_tokens=2048,

    top_p=0.9,

    frequency_penalty=0.1,

    presence_penalty=0.1,

)

# Conversation buffer intended to fill the prompt's {chat_history} slot.
# NOTE(review): `max_len` is not a documented ConversationBufferMemory
# field — confirm it is honoured rather than silently ignored/rejected.
conversation_memory = ConversationBufferMemory(

    memory_key="chat_history",

    max_len=30,

    return_messages=True

)

# LCEL pipeline: prompt -> completion LLM (ainvoke returns a plain string).
llm_chain = daysoff_assistant_prompt | llm

memory = conversation_memory

# NOTE(review): these run at import time, outside any chat session.
# cl.user_session is normally only usable inside Chainlit callbacks
# (e.g. @cl.on_chat_start) — confirm these calls work here, or move
# them into init_session so each session gets its own chain/memory.
cl.user_session.set("llm_chain", llm_chain)

cl.user_session.set("memory", memory)
|
|
|
@cl.on_message
async def handle_message(message: cl.Message):
    """Answer a user message, resolving Daysoff booking IDs when present.

    Flow:
      * If the text contains a booking code (six uppercase letters followed
        by six digits), the booking is fetched from the Daysoff API and the
        LLM phrases the record conversationally.
      * Otherwise the question is forwarded to the LLM as-is.

    All replies — including error reports — are sent back via ``cl.Message``.
    """
    user_message = message.content
    llm_chain = cl.user_session.get("llm_chain")
    memory = cl.user_session.get("memory")

    # Booking codes look like "ABCDEF123456".
    booking_pattern = r'\b[A-Z]{6}\d{6}\b'
    match = re.search(booking_pattern, user_message)

    if match:
        bestillingskode = match.group()
        headers = {
            "Authorization": auth_token,
            "Content-Type": "application/json"
        }
        payload = {"booking_id": bestillingskode}

        try:
            response = await async_post_request(API_URL, headers, payload)
            response.raise_for_status()
            booking_data = response.json()

            if "booking_id" in booking_data:
                # Feed the raw booking record to the LLM so it can phrase a
                # conversational answer.  (The previous code called
                # booking_data.group(), which is invalid on the dict that
                # response.json() returns and raised AttributeError.)
                question = json.dumps(booking_data, ensure_ascii=False)
                try:
                    answer = await _ask_llm(llm_chain, memory, question)
                    await cl.Message(content=answer).send()

                except Exception as e:
                    await cl.Message(content=f"Error processing booking data: {str(e)}").send()

            else:
                await cl.Message(content="Booking not found.").send()

        except requests.exceptions.RequestException as e:
            await cl.Message(content=f"Request failed: {str(e)}").send()

    else:
        try:
            answer = await _ask_llm(llm_chain, memory, user_message)
            await cl.Message(content=answer).send()

        except Exception as e:
            await cl.Message(content=f"Error: {str(e)}").send()


async def _ask_llm(llm_chain, memory, question):
    """Invoke the prompt|llm chain and normalise its output to a string.

    A ``prompt | llm`` LCEL pipeline with a completion model returns a plain
    string, while a legacy LLMChain returns a dict with a "text" key.  The
    original code handled the two call sites inconsistently (raw object in
    one branch, ``response["text"]`` in the other, which raises TypeError on
    a string); normalising here makes both branches behave the same.
    """
    response = await llm_chain.ainvoke(
        {"question": question, "chat_history": memory},
        callbacks=[cl.AsyncLangchainCallbackHandler()],
    )
    if isinstance(response, dict):
        return response.get("text", str(response))
    return str(response)
|
|