Spaces: Dhahlan2000 committed
Commit 686fefe • 1 Parent(s): 1359c84
Update app.py

app.py CHANGED
@@ -107,24 +107,17 @@ def transliterate_to_sinhala(text):
 
 # interface = gr.Interface.load("huggingface/microsoft/Phi-3-mini-4k-instruct")
 
-API_URL = "https://api-inference.huggingface.co/models/microsoft/Phi-3-mini-4k-instruct"
-headers = {"Authorization": f"Bearer {access_token}"}
+# API_URL = "https://api-inference.huggingface.co/models/microsoft/Phi-3-mini-4k-instruct"
+# headers = {"Authorization": f"Bearer {access_token}"}
 
-def query(payload):
-    response = requests.post(API_URL, headers=headers, json=payload)
-    return response.json()
+ai_pipe = pipeline("text-generation", model="google/gemma-2b-it", token = access_token)
 
+# def query(payload):
+#     response = requests.post(API_URL, headers=headers, json=payload)
+#     return response.json()
+
 def conversation_predict(text):
-    response_json = query({
-        "inputs": f"{text}",
-    })
-    if 'generated_text' in response_json[0]:
-        ai_response = response_json[0]['generated_text']
-    elif 'text' in response_json[0]:
-        ai_response = response_json[0]['text']
-    else:
-        ai_response = response_json[0]
-
+    ai_response = ai_pipe([{"role": "user", "content": f"{text}"}])
     return ai_response
 
 def ai_predicted(user_input):
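For context, this commit swaps the hosted Inference API call for a local transformers text-generation pipeline and returns the pipeline output as-is. Below is a minimal sketch, not part of the commit, of how that output is typically unwrapped when chat-style messages are passed in; it assumes a recent transformers version, that access_token grants access to the gated google/gemma-2b-it model, and the exact output shape may differ between library versions.

from transformers import pipeline

access_token = "hf_..."  # placeholder; in app.py the token is defined earlier

ai_pipe = pipeline("text-generation", model="google/gemma-2b-it", token=access_token)

def conversation_predict(text):
    # With chat-style input, recent pipeline versions return something like:
    # [{"generated_text": [<input messages...>, {"role": "assistant", "content": "..."}]}]
    outputs = ai_pipe([{"role": "user", "content": text}])
    return outputs[0]["generated_text"][-1]["content"]  # the assistant's reply text

The committed version instead returns ai_response (the raw pipeline output) directly, so any caller that expects a plain string would need to do this unwrapping itself.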