Update app.py
app.py CHANGED
@@ -1,27 +1,32 @@
import gradio as gr
import joblib

# Load your serialized objects
model = joblib.load('random_forest_model_3labels2.joblib')
encoder = joblib.load('label_encoder2.joblib')
vectorizer = joblib.load('count_vectorizer2.joblib')

def predict(input_text):
    # Preprocess the input with your vectorizer and encoder as needed
    # For example, if your model expects vectorized input:
    vectorized_text = vectorizer.transform([input_text])

    # Make a prediction
    prediction = model.predict(vectorized_text)

    # If your model's output needs to be decoded (optional)
    # decoded_prediction = encoder.inverse_transform(prediction)

    # Return the prediction (you might want to convert it into a more readable form)
    return prediction[0]  # Modify this according to your needs

# Setup the Gradio interface
iface = gr.Interface(fn=predict,
                     inputs=gr.Textbox(lines=2, placeholder="Enter Text Here..."),
                     outputs="text",
                     description="Your model description here.")

# Launch the app
iface.launch()
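Note that the label encoder is loaded but only referenced in the commented-out inverse_transform line, so the app currently returns the raw (encoded) prediction. A minimal sketch of a decoded variant, assuming encoder is a fitted scikit-learn LabelEncoder and reusing the artifact filenames from the diff; the predict_label helper and the sample input are hypothetical, not part of the commit:

import joblib

# Same serialized artifacts as in app.py above
model = joblib.load('random_forest_model_3labels2.joblib')
encoder = joblib.load('label_encoder2.joblib')
vectorizer = joblib.load('count_vectorizer2.joblib')

def predict_label(input_text):
    # Hypothetical variant of predict() that returns the decoded class name
    # instead of the raw encoded prediction.
    vectorized_text = vectorizer.transform([input_text])
    prediction = model.predict(vectorized_text)
    # Assumes encoder is a fitted sklearn LabelEncoder: inverse_transform
    # maps the encoded class back to its original label.
    decoded = encoder.inverse_transform(prediction)
    return decoded[0]

if __name__ == "__main__":
    # Quick local check without launching the Gradio UI
    print(predict_label("sample input text"))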
|