File size: 6,929 Bytes
12654ba
bef3e08
747720a
12654ba
 
 
 
67cba93
 
 
 
12654ba
67cba93
 
12654ba
67cba93
12654ba
957f6d4
67cba93
12654ba
 
 
 
 
 
747720a
62ea741
 
 
747720a
 
12654ba
 
78b9b84
12654ba
 
bef3e08
 
 
62ea741
bef3e08
12654ba
c34676c
747720a
 
 
 
bef3e08
efdd6c6
 
bef3e08
 
12654ba
bef3e08
12654ba
3951ea6
12654ba
 
 
 
 
 
bef3e08
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
747720a
67cba93
 
 
 
 
 
 
 
 
 
 
 
 
 
 
957f6d4
3e9531c
 
 
 
 
957f6d4
67cba93
 
 
12654ba
747720a
12654ba
 
 
 
 
 
 
 
 
a77a669
 
 
12654ba
 
62ea741
 
12654ba
62ea741
 
12654ba
 
67cba93
3e9531c
 
12654ba
 
62ea741
747720a
 
12654ba
 
 
 
 
 
 
 
 
 
 
 
747720a
12654ba
 
 
ea12aba
747720a
ea12aba
bef3e08
62ea741
bef3e08
8904a92
747720a
 
fc84509
efdd6c6
12654ba
67cba93
fc84509
 
 
 
12654ba
a77a669
ea12aba
a77a669
8904a92
a77a669
747720a
a77a669
747720a
a77a669
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
# ===========================================
# title: daysoff-assistant-API-v2, table
# file: app.py
# ===========================================

import os
import re
import uuid
import time
import json
import asyncio
import requests
from pathlib import Path
from datetime import datetime
from dotenv import load_dotenv

import chainlit as cl
from chainlit import user_session

from langchain import hub
from langchain_openai import OpenAI
from langchain.chains import LLMChain
from langchain_core.prompts import PromptTemplate
from langchain.memory.buffer import ConversationBufferMemory

# ---------------------------------------------------for backend looks, example file:----------------------------------
#with open('/home/user/.local/lib/python3.10/site-packages/socketio/async_server.py', 'r') as file:
    #content = file.read()
    #print("see line 640:", content)
# ------------------------------------------------------the end--------------------------------------------------------

# Load secrets from a local .env file (no-op if the file is absent).
load_dotenv()
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")  # consumed by the OpenAI LLM client in init_session
auth_token = os.environ.get("DAYSOFF_API_TOKEN")  # sent as the Authorization header to the booking API
API_URL = "https://aivisions.no/data/daysoff/api/v1/booking/"  # booking-lookup endpoint used in handle_message

#If booking information is requested, and with
#retrieved booking information: {table} in mind, provide a conversational answer.
#If no booking information is requested, provide a conversational answer.
#combined_message = f"### Informasjon for Bestillingskode:\n\n{table}" 

# System prompt for the assistant. Expects two template variables:
# {chat_history} (conversation so far) and {question} (current user turn).
daysoff_assistant_template = """
You are a customer support assistant for Daysoff kundeservice and help users retrieve booking information associated with their booking IDs.
By default, you respond using Norwegian bokmΓ₯l.
Provide a conversational answer.
This way you directly address the user's question in a manner that reflects the professionalism and warmth
of a customer support representative (female).
============================
Chat History: {chat_history}
Question: {question}
============================
Answer in Markdown:
"""

# PromptTemplate wrapping the string above; piped into the LLM in init_session.
daysoff_assistant_prompt = PromptTemplate(
    input_variables=["chat_history", "question"],
    template=daysoff_assistant_template,
)

async def async_post_request(url, headers, data):
    """POST `data` as JSON to `url` in a worker thread.

    `requests.post` is blocking; running it via `asyncio.to_thread`
    keeps the Chainlit event loop responsive. Returns the
    `requests.Response` object.
    """
    def _blocking_post():
        return requests.post(url, headers=headers, json=data)

    return await asyncio.to_thread(_blocking_post)

@cl.set_starters
async def set_starters():
    """Return the starter suggestions shown on a fresh chat screen."""
    starter_specs = [
        (
            "Booking ID request",
            "Kan du gi meg info om en reservasjon?",
            "/public/booking_id.svg",
        ),
        (
            "Metric Space Self-Identity Framework",
            "Explain the Metric Space Self-Identity Framework like I'm five years old.",
            "/public/learn.svg",
        ),
        (
            "Python script for daily email reports",
            "Write a script to automate sending daily email reports in Python, and walk me through how I would set it up.",
            "/public/terminal.svg",
        ),
        (
            "Morning routine ideation",
            "Can you help me create a personalized Yoga/pranayama/meditation morning routine that would help increase my productivity throughout the day? Start by asking me about my current habits and what activities energize me in the morning.",
            "/public/idea.svg",
        ),
    ]
    return [
        cl.Starter(label=label, message=message, icon=icon)
        for label, message, icon in starter_specs
    ]

def create_output_path(user=None):
    """Create and return a unique per-session log directory.

    The directory is placed under ``$HOME/logs`` (falling back to
    ``/home/user/logs``) and named ``<timestamp>--<user>--<uuid>`` with
    spaces/colons made filesystem-safe. Missing parents are created;
    an existing directory is not an error.
    """
    unique_id = uuid.uuid4()
    # Whole-second timestamp, rendered like "2024-01-31_12-00-00".
    now = datetime.fromtimestamp(round(time.time()))
    stamp = str(now).replace(" ", "_").replace(":", "-")

    home_dir = os.environ.get('HOME', '/home/user')
    target = Path(home_dir) / "logs" / f"{stamp}--{user or 'default'}--{unique_id}"
    target.mkdir(parents=True, exist_ok=True)
    return target
    
@cl.on_chat_start  # runs once per new Chainlit session
def init_session():
    """Initialize per-session state: history, log path, LLM chain, memory."""
    cl.user_session.set('chat_history', [])
    cl.user_session.set('path_out', create_output_path())
    cl.user_session.set("session_created_at", datetime.now())

    # Completion-style OpenAI model used behind the assistant prompt.
    completion_model = OpenAI(
        model="gpt-3.5-turbo-instruct",
        temperature=0.7,
        openai_api_key=OPENAI_API_KEY,
        max_tokens=2048,
        top_p=0.9,
        frequency_penalty=0.1,
        presence_penalty=0.1,
    )

    chat_memory = ConversationBufferMemory(
        memory_key="chat_history",
        max_len=30,
        return_messages=True
    )

    # LCEL pipeline: prompt | llm. Stored alongside the memory object so
    # handle_message can retrieve both from the session.
    cl.user_session.set("llm_chain", daysoff_assistant_prompt | completion_model)
    cl.user_session.set("memory", chat_memory)
    
@cl.on_message
async def handle_message(message: cl.Message):
    """Handle one user turn.

    If the message contains a booking code (6 uppercase letters followed by
    6 digits), look it up against the booking API and let the LLM phrase a
    conversational answer around the returned data; otherwise send the raw
    question straight to the LLM chain. All failures are reported back to
    the user as chat messages rather than raised.
    """
    session_created_at = cl.user_session.get("session_created_at", datetime.now())
    path_out = cl.user_session.get('path_out')
    user_message = message.content
    llm_chain = cl.user_session.get("llm_chain")
    memory = cl.user_session.get("memory")

    # Booking codes look like "ABCDEF123456".
    booking_pattern = r'\b[A-Z]{6}\d{6}\b'
    match = re.search(booking_pattern, user_message)

    if match:
        bestillingskode = match.group()
        headers = {
            "Authorization": auth_token,
            "Content-Type": "application/json"
        }
        payload = {"booking_id": bestillingskode}

        try:
            response = await async_post_request(API_URL, headers, payload)
            response.raise_for_status()
            booking_data = response.json()

            if "booking_id" in booking_data:
                # FIX: booking_data is a dict returned by response.json();
                # it has no .group() method (the old call raised
                # AttributeError on every successful lookup). Serialize the
                # booking record so the LLM can phrase an answer around it.
                user_message = json.dumps(booking_data, ensure_ascii=False)
                try:
                    answer = await llm_chain.ainvoke({
                        "question": user_message,
                        "chat_history": memory
                    }, callbacks=[cl.AsyncLangchainCallbackHandler()])
                    await cl.Message(content=answer).send()

                except Exception as e:
                    await cl.Message(content=f"Error processing booking data: {str(e)}").send()

            else:
                await cl.Message(content="Booking not found.").send()

        except requests.exceptions.RequestException as e:
            await cl.Message(content=f"Request failed: {str(e)}").send()

    else:
        try:
            answer = await llm_chain.ainvoke({
                "question": user_message,
                "chat_history": memory
            }, callbacks=[cl.AsyncLangchainCallbackHandler()])

            # FIX: `prompt | llm` (LCEL) returns a plain string from
            # ainvoke, not an LLMChain-style {"text": ...} dict; indexing
            # with ["text"] raised TypeError. Send the string directly,
            # consistent with the booking branch above.
            await cl.Message(content=answer).send()

        except Exception as e:
            await cl.Message(content=f"Error: {str(e)}").send()