KvrParaskevi committed
Commit 871fa88 · verified · 1 Parent(s): b6e50dc

Update model.py

Files changed (1)
  1. model.py +57 -0
model.py CHANGED
@@ -0,0 +1,57 @@
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
+ from langchain.llms.base import LLM
+ from langchain.memory import ConversationBufferMemory, ConversationBufferWindowMemory
+ from langchain.chains import LLMChain, ConversationChain
+ from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline
+ from langchain_community.llms import HuggingFaceEndpoint
+ from langchain.prompts import (
+     PromptTemplate,
+     ChatPromptTemplate,
+     SystemMessagePromptTemplate,
+     HumanMessagePromptTemplate,
+ )
+ import os
+
+ your_endpoint_url = "https://kp4xdy196cw81uf3.us-east-1.aws.endpoints.huggingface.cloud"
+
+ llm = HuggingFaceEndpoint(
+     endpoint_url=f"{your_endpoint_url}",
+     # API token value omitted in the commit; read it from the environment instead.
+     huggingfacehub_api_token=os.environ.get("HUGGINGFACEHUB_API_TOKEN"),
+     task="text-generation",
+     max_new_tokens=128,
+     top_k=10,
+     top_p=0.95,
+     typical_p=0.95,
+     temperature=0.01,
+     repetition_penalty=1.03,
+ )
+ #print(llm)
+
+ def chat_template_prompt():
+     template = """
+     Do not repeat questions and do not generate an answer for the user/human.
+
+     You are a helpful hotel booking assistant.
+     Below is an instruction that describes a task.
+     Write a response that appropriately completes the request.
+     Reply with the most helpful and logical answer. During the conversation you need to ask the user
+     the following questions to complete the hotel booking task.
+     1) Where would you like to stay and when?
+     2) How many people are staying in the room?
+     3) Do you prefer any amenities like breakfast included or a gym?
+     4) What is your name, your email address and phone number?
+
+     When the booking task is completed, respond with "Thank you for choosing us.".
+
+     {history}
+
+     """
+
+     system_prompt = SystemMessagePromptTemplate.from_template(template)
+     human_prompt = HumanMessagePromptTemplate.from_template("{input}")
+     chat_prompt = ChatPromptTemplate.from_messages([system_prompt, human_prompt])
+     return chat_prompt
+
+ def chain():
+     #memory = ConversationBufferMemory(memory_key="history")
+     chat_prompt = chat_template_prompt()
+     memory = ConversationBufferWindowMemory(k=3, memory_key="history")
+     llm_chain = LLMChain(llm=llm, memory=memory, prompt=chat_prompt)
+     memory.load_memory_variables({})  # Initialize memory
+     return llm_chain
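
A minimal usage sketch, not part of this commit: it assumes the file above is importable as model and that a valid HUGGINGFACEHUB_API_TOKEN is set in the environment.

# Hypothetical caller for the chain defined in model.py above.
from model import chain

llm_chain = chain()
# LLMChain.predict fills the prompt's "input" variable and returns the model's reply as a string.
reply = llm_chain.predict(input="Hi, I'd like to book a room in Athens for next weekend.")
print(reply)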