JasonTPhillipsJr committed
Commit bf52bfd
1 Parent(s): 564da7a

Update app.py

Files changed (1):
  app.py +7 -4
app.py CHANGED
@@ -23,7 +23,7 @@ bert_model.to(device)
 bert_model.eval()

 #SpaBERT Initialization Section
-data_file_path = 'models/spabert/datasets/SPABERT_finetuning_data_combined.json'
+data_file_path = 'models/spabert/datasets/SPABERT_finetuning_data_combined.json' #Make a new json file with only the geo entities needed, or it takes too long to run.
 pretrained_model_path = 'models/spabert/datasets/fine-spabert-base-uncased-finetuned-osm-mn.pth'

 config = SpatialBertConfig()
@@ -45,8 +45,8 @@ spatialDataset = PbfMapDataset(data_file_path = data_file_path,
                                distance_norm_factor = 0.0001,
                                spatial_dist_fill = 20,
                                with_type = False,
-                               sep_between_neighbors = True, #Initially false, play around with this potentially?
-                               label_encoder = None, #Initially None, potentially change this because we do have real/fake reviews.
+                               sep_between_neighbors = True,
+                               label_encoder = None,
                                mode = None) #If set to None it will use the full dataset for mlm

 data_loader = DataLoader(spatialDataset, batch_size=1, num_workers=0, shuffle=False, pin_memory=False, drop_last=False) #issue needs to be fixed with num_workers not stopping after finished
@@ -152,7 +152,10 @@ if st.button("Highlight Geo-Entities"):

     # Debug: Print the embeddings themselves (optional)
     st.write("Embeddings:", bert_embedding)
-
+
+    combined_embedding = torch.cat((bert_embedding, all_embeddings[0]), dim=-1)
+    st.write("Concatenated Embedding Shape:", combined_embedding.shape)
+    st.write("Concatenated Embedding:", combined_embedding)
     # Process the text using spaCy
     doc = nlp(selected_review)
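The comment added to data_file_path suggests a preprocessing step: trim SPABERT_finetuning_data_combined.json down to only the geo-entities the app needs before PbfMapDataset ever reads it. Below is a minimal sketch of that trimming, assuming the file stores one JSON record per line with the entity name reachable at record["info"]["name"]; that layout, the subset file name, and the example entity names are assumptions for illustration, not details taken from this commit.

import json

# Hypothetical subset writer: keep only records whose entity name is wanted,
# so data_file_path can point at a much smaller file.
wanted_names = {"Los Angeles", "San Francisco"}  # placeholder entity names
src = "models/spabert/datasets/SPABERT_finetuning_data_combined.json"
dst = "models/spabert/datasets/SPABERT_finetuning_data_subset.json"  # hypothetical output path

with open(src) as fin, open(dst, "w") as fout:
    for line in fin:
        line = line.strip()
        if not line:
            continue
        record = json.loads(line)
        # The record["info"]["name"] path is an assumed schema; adjust to the real one.
        if record.get("info", {}).get("name") in wanted_names:
            fout.write(json.dumps(record) + "\n")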
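The DataLoader line keeps the inline worry about num_workers, but with num_workers=0 (as configured here) every batch is produced in the main process and PyTorch spawns no worker subprocesses, so nothing is left running once iteration ends; lingering workers only become a concern when num_workers > 0. A toy sketch of the same loader settings, using a stand-in dataset because PbfMapDataset's internals are not part of this diff:

import torch
from torch.utils.data import Dataset, DataLoader

class DummySpatialDataset(Dataset):
    """Hypothetical stand-in for PbfMapDataset, used only to exercise the loader."""
    def __len__(self):
        return 4

    def __getitem__(self, idx):
        # One fixed-length token tensor per item; the key name is made up.
        return {"pseudo_sentence": torch.zeros(8, dtype=torch.long)}

loader = DataLoader(DummySpatialDataset(), batch_size=1, num_workers=0,
                    shuffle=False, pin_memory=False, drop_last=False)

for batch in loader:
    print(batch["pseudo_sentence"].shape)  # torch.Size([1, 8]) for each of the 4 batches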
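The functional core of the commit is the fusion step: torch.cat joins the review's BERT embedding with the first SpaBERT embedding from all_embeddings along the last dimension, and st.write echoes the combined shape for debugging. The shape arithmetic is easy to verify with dummy tensors; the 768-dimensional sizes below are placeholders rather than shapes taken from app.py, since the real embedding shapes are not shown in this diff.

import torch

# torch.cat along dim=-1 requires all other dimensions to match and sums the last one.
bert_embedding = torch.randn(1, 768)     # stand-in for the BERT review embedding
spabert_embedding = torch.randn(1, 768)  # stand-in for all_embeddings[0]

combined_embedding = torch.cat((bert_embedding, spabert_embedding), dim=-1)
print(combined_embedding.shape)  # torch.Size([1, 1536])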