Commit c813716
1 Parent(s): 97fe9c2
Update load_model.py
Files changed: load_model.py (+7 -2)
load_model.py (CHANGED)
@@ -4,7 +4,7 @@ import tensorflow as tf
 from config import config
 
 
-def load_model(en_emb_matrix, de_emb_matrix, model_path, config):
+def load_transformer(en_emb_matrix, de_emb_matrix, model_path, config):
     # Initialize and rebuild your Transformer model
     # (Make sure to replace '...' with actual parameters)
     model = Transformer(
@@ -24,4 +24,9 @@ def load_model(en_emb_matrix, de_emb_matrix, model_path, config):
 def load_sp_model(path_en,path_ur):
     sp_model_en = tf_text.SentencepieceTokenizer(model=tf.io.gfile.GFile(path_en, 'rb').read(),add_bos=True,add_eos=True)
     sp_model_ur = tf_text.SentencepieceTokenizer(model=tf.io.gfile.GFile(path_ur, 'rb').read(),reverse=True,add_bos=True,add_eos=True)
-    return sp_model_en, sp_model_ur
+    return sp_model_en, sp_model_ur
+
+def load_emb(emb_path):
+    with h5py.File(emb_path, 'r') as hf:
+        embedding_matrix = hf['embeddings'][:]
+    return embedding_matrix
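For context, below is a minimal usage sketch of the helpers touched by this commit. Only load_transformer, load_sp_model, load_emb, and the 'embeddings' HDF5 key come from the diff itself; the file names and the config import are placeholders for illustration. Note that the diff does not show an "import h5py" being added, so load_emb assumes h5py is already imported elsewhere in load_model.py.

# Hypothetical usage sketch (not part of the commit).
# File names below are placeholders; load_emb relies on h5py being
# available in load_model.py, which this diff does not show being imported.
from load_model import load_emb, load_sp_model, load_transformer
from config import config

# Embedding matrices stored under the 'embeddings' key of an HDF5 file (per the diff)
en_emb_matrix = load_emb('en_embeddings.h5')   # placeholder path
de_emb_matrix = load_emb('ur_embeddings.h5')   # placeholder path

# SentencePiece tokenizers for the English/Urdu pair
sp_model_en, sp_model_ur = load_sp_model('en.model', 'ur.model')  # placeholder paths

# Rebuild the Transformer and restore its weights from the given checkpoint path
model = load_transformer(en_emb_matrix, de_emb_matrix, 'model_weights.h5', config)

Since load_transformer is simply the renamed load_model, any caller that previously imported load_model from this module needs to switch to the new name.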