JasonTPhillipsJr committed on
Commit
d2568a6
1 Parent(s): 5f6c4ef

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -3
app.py CHANGED
@@ -8,7 +8,6 @@ from models.spabert.models.spatial_bert_model import SpatialBertConfig, SpatialB
8
  from models.spabert.utils.common_utils import load_spatial_bert_pretrained_weights
9
  from models.spabert.datasets.osm_sample_loader import PbfMapDataset
10
  from torch.utils.data import DataLoader
11
-
12
  from PIL import Image
13
 
14
  device = torch.device('cpu')
@@ -117,10 +116,10 @@ def get_bert_embedding(review_text):
117
  def get_spaBert_embedding(entity):
118
  entity_index = entity_index_dict.get(entity.lower(), None)
119
  if entity_index is None:
120
- st.write("Got Bert embedding for: ", entity)
121
  return get_bert_embedding(entity) #Fallback in-case SpaBERT could not resolve entity to retrieve embedding. Rare-cases only.
122
  else:
123
- st.write("Got SpaBert embedding for: ", entity)
124
  return spaBERT_embeddings[entity_index]
125
 
126
 
@@ -143,6 +142,37 @@ def processSpatialEntities(review, nlp):
143
  return processed_embedding
144
 
145
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
146
  # Function to read reviews from a text file
147
  def load_reviews_from_file(file_path):
148
  reviews = {}
 
8
  from models.spabert.utils.common_utils import load_spatial_bert_pretrained_weights
9
  from models.spabert.datasets.osm_sample_loader import PbfMapDataset
10
  from torch.utils.data import DataLoader
 
11
  from PIL import Image
12
 
13
  device = torch.device('cpu')
 
116
def get_spaBert_embedding(entity):
    """Return the precomputed SpaBERT embedding for *entity*.

    The entity is looked up case-insensitively in ``entity_index_dict``.
    When no index is found, delegates to ``get_bert_embedding`` as a
    fallback (rare case: SpaBERT could not resolve the entity).
    """
    index = entity_index_dict.get(entity.lower())
    if index is not None:
        #st.write("Got SpaBert embedding for: ", entity)
        return spaBERT_embeddings[index]
    #st.write("Got Bert embedding for: ", entity)
    # Fallback in case SpaBERT could not resolve the entity to an embedding.
    return get_bert_embedding(entity)
124
 
125
 
 
142
  return processed_embedding
143
 
144
 
145
#Discriminator Initialization section
class Discriminator(nn.Module):
    """GAN-BERT-style discriminator head.

    An MLP that maps an input representation to ``num_labels + 1``
    classes; the extra class scores the sample as fake/real.

    Args:
        input_size: Dimension of the input representation.
        hidden_sizes: Sizes of the hidden layers; defaults to ``[512]``.
            (Was a mutable default argument ``[512]`` — the shared list
            could be mutated across calls; now built per instance.)
        num_labels: Number of real classes; one extra logit is appended.
        dropout_rate: Dropout probability applied to the input and after
            each hidden layer.
    """

    def __init__(self, input_size=512, hidden_sizes=None, num_labels=2, dropout_rate=0.1):
        super().__init__()
        if hidden_sizes is None:
            hidden_sizes = [512]
        self.input_dropout = nn.Dropout(p=dropout_rate)
        layers = []
        dims = [input_size] + list(hidden_sizes)
        for i in range(len(dims) - 1):
            layers.extend([
                nn.Linear(dims[i], dims[i + 1]),
                nn.LeakyReLU(0.2, inplace=True),
                nn.Dropout(dropout_rate),
            ])
        self.layers = nn.Sequential(*layers)  # flattened hidden stack
        # +1 for the probability of this sample being fake/real.
        self.logit = nn.Linear(dims[-1], num_labels + 1)
        self.softmax = nn.Softmax(dim=-1)

    def forward(self, input_rep):
        """Return ``(last_hidden_rep, logits, softmax_probs)``."""
        input_rep = self.input_dropout(input_rep)
        last_rep = self.layers(input_rep)
        logits = self.logit(last_rep)
        probs = self.softmax(logits)
        return last_rep, logits, probs
165
+
166
+ #dConfig = AutoConfig.from_pretrained("bert-base-uncased")
167
+ #hidden_size = int(dConfig.hidden_size)
168
+ #num_hidden_layers_d = 2;
169
+ #hidden_levels_d = [hidden_size for i in range(0, num_hidden_layers_d)]
170
+ #label_list = ["1", "0"]
171
+ #label_list.append('UNL')
172
+ #discriminator = Discriminator(input_size=hidden_size*2, hidden_sizes=hidden_levels_d, num_labels=len(label_list), dropout_rate=out_dropout_rate).to(device)
173
+
174
+
175
+
176
  # Function to read reviews from a text file
177
  def load_reviews_from_file(file_path):
178
  reviews = {}