from entity import Docs, Cluster, Preprocess, SummaryInput
from fastapi import FastAPI
import time
import hashlib
import json
from fastapi.middleware.cors import CORSMiddleware
from iclibs.ic_rabbit import ICRabbitMQ
from get_config import config_params

app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


def get_hash_id(item: Docs):
    """Build a deterministic SHA-224 fingerprint of a request from the doc URLs
    and the clustering parameters, so identical requests map to the same id."""
    str_hash = ""
    for it in item.response["docs"]:
        str_hash += it["url"]
    str_hash += str(item.top_cluster)
    str_hash += str(item.top_sentence)
    str_hash += str(item.topn_summary)
    str_hash += str(item.top_doc)
    str_hash += str(item.threshold)
    if item.sorted_field.strip():
        str_hash += str(item.sorted_field)
    return hashlib.sha224(str_hash.encode("utf-8")).hexdigest()


# Load previously logged data if the log file exists; otherwise start empty.
try:
    with open("log_mnews/log.txt") as f:
        data_dict = json.load(f)
except Exception as ve:
    print("could not load log_mnews/log.txt:", ve)
    data_dict = {}

def init_rabbit_queue(usr, passw, host, vir_host, queue_name, durable, max_priority, exchange=""):
    """Open a RabbitMQ connection, declare the target queue, and return the
    channel, the connection, and the queue name."""
    connection = ICRabbitMQ(host, vir_host, usr, passw)
    connection.init_connection()
    channel = connection.init_queue(
        queue_name, exchange=exchange, durable=durable, max_priority=max_priority)
    return channel, connection, queue_name


@app.post("/mnews/topic_clustering")
async def topic_clustering_v2(item: Docs):
    """Receive a topic-clustering request and enqueue it for the RabbitMQ worker."""
    print("command id: ", item.command_id)
    docs = item.response["docs"]
    meta = item.response.get("meta", {})
    print("len docs: ", len(docs))
    st_time = time.time()
    top_cluster = item.top_cluster
    top_sentence = item.top_sentence
    topn_summary = item.topn_summary
    hash_str = get_hash_id(item)

    # Payload forwarded to the clustering worker.
    data_push = {
        "threshold": item.threshold,
        "top_cluster": top_cluster,
        "top_sentence": top_sentence,
        "topn_summary": topn_summary,
        "hash_str": hash_str,
        "st_time": st_time,
        "command_id": item.command_id,
        "docs": docs,
        "meta": meta,
    }

    # RabbitMQ connection settings for the mnews topic-clustering queue.
    params = config_params["queue_topic_clustering_mnews"]
    usr_name = params["usr_name"]
    password = str(params["password"])
    host = params["host"]
    virtual_host = params["virtual_host"]
    queue_name = params["queue_name"]

    # Open a channel to the durable priority queue and publish the job as a
    # persistent message (delivery_mode=2).
    channel, connection, queue = init_rabbit_queue(
        usr_name, password, host, virtual_host, queue_name, True, 10)
    ICRabbitMQ.publish_message(
        channel, queue, data_push, priority=1, delivery_mode=2, exchange="")
    return {"message": "success"}