JustKiddo committed
Commit aab145f · verified · 1 Parent(s): 289913c

Update app.py

Files changed (1)
  1. app.py +4 -9
app.py CHANGED
@@ -67,7 +67,7 @@ class VietnameseChatbot:
     ]
 
     @st.cache_data
-    def _compute_embeddings(queries):
+    def _compute_embeddings(self, _queries=None): # Add _queries parameter with underscore
         """
         Pre-compute embeddings for conversation queries
         Cached to avoid recomputing on every run
@@ -89,15 +89,10 @@ class VietnameseChatbot:
         except Exception as e:
             print(f"Embedding error: {e}")
             return None
-
-        # Import these arguments to make the function self-contained
-        from transformers import AutoTokenizer, AutoModel
-        tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-small')
-        model = AutoModel.from_pretrained('intfloat/multilingual-e5-small', torch_dtype=torch.float16)
-
+
         embeddings = []
-        for query in queries:
-            embedding = embed_single_text(query['query'], tokenizer, model)
+        for conversation in self.conversation_data: # Use self.conversation_data instead of queries
+            embedding = embed_single_text(conversation['query'], self.tokenizer, self.model) # Use self.tokenizer and self.model
             if embedding is not None:
                 embeddings.append(embedding)
         return np.array(embeddings)
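
For context on the signature change in the first hunk: st.cache_data skips hashing any parameter whose name begins with an underscore, which is the convention the new `_queries` parameter follows and the usual way to let a cached function receive objects that Streamlit cannot hash (models, tokenizers, class instances). A minimal standalone sketch of that convention, with illustrative names rather than the app's actual VietnameseChatbot class:

```python
import numpy as np
import streamlit as st

@st.cache_data
def compute_embeddings(_chatbot):
    # "_chatbot" starts with an underscore, so st.cache_data excludes it from
    # the cache key and never tries to hash the (unhashable) chatbot object.
    embeddings = []
    for conversation in _chatbot.conversation_data:
        # Placeholder vector standing in for a real embedding-model call.
        embeddings.append(np.random.rand(384))
    return np.array(embeddings)
```

The trade-off is that underscore-prefixed arguments do not participate in cache invalidation: if `_chatbot.conversation_data` changes, the previously cached array is still returned until the cache is cleared.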
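
The removed lines in the second hunk loaded intfloat/multilingual-e5-small inside the method; after this commit the tokenizer and model are expected to live on `self`. The `embed_single_text` helper called at both call sites is not part of this diff, so the following is only a plausible sketch of such a helper (mean pooling with the e5 "query: " prefix), matching the call signature seen above rather than the app's actual implementation:

```python
import torch
from transformers import AutoModel, AutoTokenizer

def embed_single_text(text, tokenizer, model):
    # e5-style models expect a task prefix; "query: " is the documented prefix
    # for query-side inputs.
    inputs = tokenizer(f"query: {text}", return_tensors="pt",
                       truncation=True, max_length=512)
    with torch.no_grad():
        outputs = model(**inputs)
    # Mean-pool token embeddings over non-padding positions.
    mask = inputs["attention_mask"].unsqueeze(-1).float()
    summed = (outputs.last_hidden_state * mask).sum(dim=1)
    counts = mask.sum(dim=1).clamp(min=1e-9)
    return (summed / counts).squeeze(0).numpy()

# Example usage (downloads the model on first run):
# tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-small')
# model = AutoModel.from_pretrained('intfloat/multilingual-e5-small')
# vector = embed_single_text('Xin chào', tokenizer, model)
```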