jarif committed on
Commit b52237e • 1 Parent(s): efeb7be

Update app.py

Files changed (1)
  1. app.py +93 -79
app.py CHANGED
@@ -1,79 +1,93 @@
- import streamlit as st
- import numpy as np
- from tensorflow.keras.models import load_model
- from tensorflow.keras.preprocessing.sequence import pad_sequences
- from tensorflow.keras.preprocessing.text import one_hot
- import pickle
- import emoji
-
- # Streamlit app title
- st.title('Unveiling Sentiment A Deep Dive into Sentiment Analysis :koala:')
-
- # Function to load model and predict sentiment
- def predict_sentiment(custom_data):
-     try:
-         # Load the trained model
-         model = load_model('sentiment_analysis_model.h5')
-
-         # Load the one-hot encoding information
-         with open('one_hot_info_1.pkl', 'rb') as handle:
-             one_hot_info = pickle.load(handle)
-
-         vocab_size = one_hot_info['vocab_size']
-         max_len = one_hot_info['max_len']
-
-         # Define labels with emojis
-         labels_with_emojis = {
-             'Positive': '😊',
-             'Neutral': '😐',
-             'Negative': '😔'
-         }
-
-         # One-hot encode each tweet
-         one_hot_texts = [one_hot(text, vocab_size) for text in custom_data]
-
-         # Pad the sequences
-         padded_texts = pad_sequences(one_hot_texts, padding='pre', maxlen=max_len)
-
-         # Predict the sentiments for all tweets
-         predictions = model.predict(np.array(padded_texts))
-
-         # Convert predictions to class labels and probabilities
-         predicted_sentiments = []
-         for prediction in predictions:
-             sentiment = np.argmax(prediction)
-             sentiment_label = list(labels_with_emojis.keys())[sentiment]
-             sentiment_emoji = labels_with_emojis[sentiment_label]
-             sentiment_probabilities = {label: round(prob, 4) for label, prob in zip(labels_with_emojis.keys(), prediction)}
-             predicted_sentiments.append((sentiment_label, sentiment_emoji, sentiment_probabilities))
-
-         return predicted_sentiments
-
-     except Exception as e:
-         st.error(f"Error during prediction: {e}")
-         return None
-
- # Streamlit UI
- user_input = st.text_area("Please enter the tweet you'd like analyzed::whale:")
-
- if st.button('Analyze'):
-     if user_input.strip(): # Check if input is not empty
-         # Remove emojis and replace with their description
-         user_input = emoji.demojize(user_input)
-
-         # Split input by newlines to handle multiple tweets
-         tweets = user_input.split('\n')
-
-         # Predict sentiment for custom data
-         predicted_sentiments = predict_sentiment(tweets)
-
-         if predicted_sentiments is not None:
-             # Display results for each tweet
-             st.write("## Predicted Sentiments:")
-             for i, (sentiment_label, sentiment_emoji, sentiment_probabilities) in enumerate(predicted_sentiments):
-                 st.write(f"Tweet {i+1}: {sentiment_label} {sentiment_emoji}")
-                 st.write("Probabilities:")
-                 for label, prob in sentiment_probabilities.items():
-                     st.write(f"{label}: {prob:.4f}")
-     else:
-         st.write("Please enter tweet(s) to analyze.")
+ import os
+ import streamlit as st
+ import numpy as np
+ from tensorflow.keras.models import load_model
+ from tensorflow.keras.preprocessing.sequence import pad_sequences
+ from tensorflow.keras.preprocessing.text import one_hot
+ import pickle
+ import emoji
+
+ # Set TensorFlow to use only CPU
+ os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
+
+ # Streamlit app title
+ st.title('Unveiling Sentiment: A Deep Dive into Sentiment Analysis 🐨')
+
+ # Function to load model and predict sentiment
+ def predict_sentiment(custom_data):
+     try:
+         # Load the trained model from the new format
+         model_path = 'saved_model_format'
+         if not os.path.exists(model_path):
+             st.error(f"Model file not found: {model_path}")
+             return None
+         model = load_model(model_path)
+         st.write("Model loaded successfully.")
+
+         # Load the one-hot encoding information
+         one_hot_info_path = 'one_hot_info_1.pkl'
+         if not os.path.exists(one_hot_info_path):
+             st.error(f"One-hot info file not found: {one_hot_info_path}")
+             return None
+         with open(one_hot_info_path, 'rb') as handle:
+             one_hot_info = pickle.load(handle)
+         st.write("One-hot info loaded successfully.")
+
+         vocab_size = one_hot_info['vocab_size']
+         max_len = one_hot_info['max_len']
+
+         # Define labels with emojis
+         labels_with_emojis = {
+             'Positive': '😊',
+             'Neutral': '😐',
+             'Negative': '😔'
+         }
+
+         # One-hot encode each tweet
+         one_hot_texts = [one_hot(text, vocab_size) for text in custom_data]
+
+         # Pad the sequences
+         padded_texts = pad_sequences(one_hot_texts, padding='pre', maxlen=max_len)
+
+         # Predict the sentiments for all tweets
+         predictions = model.predict(np.array(padded_texts))
+
+         # Convert predictions to class labels and probabilities
+         predicted_sentiments = []
+         for prediction in predictions:
+             sentiment = np.argmax(prediction)
+             sentiment_label = list(labels_with_emojis.keys())[sentiment]
+             sentiment_emoji = labels_with_emojis[sentiment_label]
+             sentiment_probabilities = {label: round(prob, 4) for label, prob in zip(labels_with_emojis.keys(), prediction)}
+             predicted_sentiments.append((sentiment_label, sentiment_emoji, sentiment_probabilities))
+
+         return predicted_sentiments
+
+     except Exception as e:
+         st.error(f"Error during prediction: {e}")
+         return None
+
+ # Streamlit UI
+ user_input = st.text_area("Please enter the tweet you'd like analyzed 🐋")
+
+ if st.button('Analyze'):
+     if user_input.strip(): # Check if input is not empty
+         # Remove emojis and replace with their description
+         user_input = emoji.demojize(user_input)
+
+         # Split input by newlines to handle multiple tweets
+         tweets = user_input.split('\n')
+
+         # Predict sentiment for custom data
+         predicted_sentiments = predict_sentiment(tweets)
+
+         if predicted_sentiments is not None:
+             # Display results for each tweet
+             st.write("## Predicted Sentiments:")
+             for i, (sentiment_label, sentiment_emoji, sentiment_probabilities) in enumerate(predicted_sentiments):
+                 st.write(f"Tweet {i+1}: {sentiment_label} {sentiment_emoji}")
+                 st.write("Probabilities:")
+                 for label, prob in sentiment_probabilities.items():
+                     st.write(f"{label}: {prob:.4f}")
+     else:
+         st.write("Please enter tweet(s) to analyze.")