File size: 3,242 Bytes
6384276
cdd7729
ef9a278
e04aa8c
6384276
 
 
 
ef9a278
e04aa8c
ef9a278
6384276
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
725eacb
 
 
 
 
6384276
725eacb
 
 
 
 
 
ef9a278
6384276
 
ef9a278
725eacb
ef9a278
 
 
 
e04aa8c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
from fastapi import FastAPI, HTTPException
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
import requests
from transformers import pipeline, AutoModelForSequenceClassification, AutoTokenizer
import torch
from pydantic import BaseModel

app = FastAPI()

# Load model directly from Hugging Face Hub
# (first call downloads the weights; subsequent starts use the local HF cache)
model_name = "SandboxBhh/sentiment-thai-text-model"

try:
    device = 0 if torch.cuda.is_available() else -1  # 0 = first GPU, -1 = CPU
    # Build the HF text-classification pipeline once at startup and reuse it
    # for every request.
    reloaded_pipe = pipeline(
        "text-classification",
        model=model_name,
        tokenizer=model_name,
        device=device,

    )

except Exception as e:
    # Keep the API process alive even if the model fails to load; the
    # /classify-text endpoint checks for None and returns HTTP 500.
    print(f"Error loading model: {e}")
    reloaded_pipe = None

class TextInput(BaseModel):
    """Request body for /classify-text: the raw text to classify."""
    text: str

def send_line_notification(message, line_token):
    """Send *message* via the LINE Notify API.

    Args:
        message: Text to push (LINE Notify caps message length server-side).
        line_token: LINE Notify access token used as a Bearer credential.

    Returns:
        The HTTP status code of the notify call (200 on success).
    """
    url = "https://notify-api.line.me/api/notify"
    headers = {"Authorization": f"Bearer {line_token}"}
    data = {"message": message}
    # A timeout is required: without one, a stalled LINE endpoint would hang
    # this worker indefinitely.
    response = requests.post(url, headers=headers, data=data, timeout=10)
    return response.status_code

def split_message(message, max_length=1000):
    """Break *message* into consecutive chunks of at most *max_length* chars.

    An empty message yields an empty list.
    """
    chunks = []
    start = 0
    total = len(message)
    while start < total:
        chunks.append(message[start:start + max_length])
        start += max_length
    return chunks

# Read the LINE Notify token from the environment, falling back to the legacy
# hard-coded value so existing deployments keep working unchanged.
# SECURITY NOTE(review): the fallback token is committed to source control and
# should be treated as leaked — revoke it and set LINE_TOKEN instead.
import os

line_token = os.environ.get("LINE_TOKEN", "C9r65PpEvIvOJSK2xMhgl53WvmOhhnKEOuQq7DsiVJT")

@app.post("/classify-text")
def classify_text(input: TextInput):
    """Classify the sentiment of Thai text; push a LINE alert if negative.

    Declared as a plain ``def`` (not ``async def``) on purpose: both the model
    inference and the LINE notification are blocking calls, so FastAPI should
    run this handler in its threadpool instead of on the event loop.

    Returns a dict with the raw pipeline ``result``, a human-readable
    ``message``, and the ``formatted_message`` used for the notification.
    Raises HTTP 500 if the model failed to load or classification errors out.
    """
    if reloaded_pipe is None:
        raise HTTPException(status_code=500, detail="Model not loaded")

    try:
        result = reloaded_pipe(input.text)
        sentiment = result[0]['label'].lower()
        score = result[0]['score']

        # 'neg' is this model's negative-sentiment label — TODO confirm the
        # full label set against the model card.
        if sentiment == 'neg':
            message = f"[แจ้งเตือน CSI]: ความพึงพอใจของผู้ป่วย \n ข้อความ: {input.text} \n csi score: {score:.2f}"
            # LINE Notify limits message size, so send long alerts in parts.
            message_parts = split_message(message)

            for i, part in enumerate(message_parts):
                status = send_line_notification(part, line_token)
                if status == 200:
                    print(f"ส่งการแจ้งเตือนส่วนที่ {i+1}/{len(message_parts)} ผ่าน LINE สำเร็จ")
                else:
                    print(f"การส่งการแจ้งเตือนส่วนที่ {i+1}/{len(message_parts)} ผ่าน LINE ล้มเหลว")

            return {
                "result": result,
                "message": f"Negative sentiment detected and notification sent to LINE. \n{message}",
                "formatted_message": message
            }
        else:
            message = f"[Sentiment Info]: ข้อความ: {input.text} \n csi score: {score:.2f}"
            return {
                "result": result,
                "message": "Sentiment is not negative. No notification sent.",
                "formatted_message": message
            }

    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


app.mount("/", StaticFiles(directory="static", html=True), name="static")

@app.get("/")
def index() -> FileResponse:
    """Serve the frontend entry page from the container's static directory."""
    page = FileResponse(path="/app/static/index.html", media_type="text/html")
    return page