FremyCompany committed
Commit 0f3913b · verified · 1 Parent(s): b3b79e1

Update app.py

Files changed (1)
  1. app.py +6 -2
app.py CHANGED
@@ -1,7 +1,7 @@
 import streamlit as st
 st.set_page_config(layout="wide")
 
-DEBUG = True
+DEBUG = False
 if DEBUG:
     # Use some dummy data
     tokens = [
@@ -49,7 +49,6 @@ else:
     token_similarities = F.normalize(embeddings[0], dim=1) @ F.normalize(embeddings[1], dim=1).T
 
     sentence_similarity = F.normalize(torch.mean(embeddings[0], dim=0), dim=-1) @ F.normalize(torch.mean(embeddings[1], dim=0), dim=-1)
-    sentence_similarity = max(0, round(sentence_similarity.item(), 2))
 
     #print("="*60)
     #print("Mapping sentence1 to sentence2...")
@@ -67,6 +66,11 @@ else:
         i = torch.argmax(token_probabilities_21[j])
         #print(tokens[1][j].ljust(15), tokens[0][i].ljust(15), round(token_probabilities_21[j][i].item(), 2))
 
+    # Convert to naive python objects
+    sentence_similarity = max(0, round(sentence_similarity.item(), 2))
+    token_probabilities_12 = token_probabilities_12.numpy().tolist()
+    token_probabilities_21 = token_probabilities_21.numpy().tolist()
+
 # Simplify the tokens for display
 tokens = [[token[3:].replace("\u2581", " ") for token in sentence] for sentence in tokens]
 
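For context, the similarity math touched by the hunks above can be sketched standalone. This is a minimal sketch, not the app itself: the random tensors stand in for the encoder output the real app computes, and only the variable names are taken from the diff.

import torch
import torch.nn.functional as F

# Stand-ins for the app's embeddings: one (num_tokens, hidden_dim) tensor per sentence.
embeddings = [torch.randn(5, 16), torch.randn(7, 16)]

# Token-level cosine similarities: entry [i, j] compares token i of
# sentence 1 with token j of sentence 2.
token_similarities = F.normalize(embeddings[0], dim=1) @ F.normalize(embeddings[1], dim=1).T

# Sentence-level similarity: cosine between the mean-pooled token embeddings.
sentence_similarity = F.normalize(torch.mean(embeddings[0], dim=0), dim=-1) @ F.normalize(torch.mean(embeddings[1], dim=0), dim=-1)

# As of this commit, clamping and rounding happen once, at display time.
sentence_similarity = max(0, round(sentence_similarity.item(), 2))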
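The new "Convert to naive python objects" block is presumably there so the DEBUG branch (which builds plain Python lists) and the model branch hand identical types to the display code that follows; .numpy().tolist() turns a tensor into nested lists of Python floats. A tiny illustration:

import torch

probs = torch.softmax(torch.randn(2, 3), dim=-1)  # a torch.Tensor
as_lists = probs.numpy().tolist()                 # nested lists of plain floats
assert isinstance(as_lists[0][0], float)

Note that .numpy() assumes a CPU tensor with no grad attached; a tensor on an accelerator would need .cpu() first.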