Feliks Zaslavskiy committed · Commit f248e14 · Parent(s): ecdea0f

wip
Files changed:
- app.py (+2 -1)
- quick_evaluate.py (+1 -0)
- train.py (+4 -2)
app.py
CHANGED
@@ -15,7 +15,8 @@ from io import BytesIO
 
 # For baseline 'sentence-transformers/paraphrase-albert-base-v2'
 model_name = 'output/training_OnlineConstrativeLoss-2023-03-14_01-24-44'
-model_name = 'output/training_OnlineConstrativeLoss-2023-03-
+model_name = 'output/training_OnlineConstrativeLoss-2023-03-17_23-15-52'
+model_name = 'output/training_OnlineConstrativeLoss-2023-03-17_23-50-15'
 
 similarity_threshold = 0.9
 
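The checkpoint loaded in app.py is compared against similarity_threshold = 0.9 to decide whether two inputs count as a match. A minimal sketch of how that threshold is typically applied with sentence-transformers; the is_match helper and the example strings are assumptions for illustration, not code from app.py:

# Hypothetical sketch: score a pair of strings with the fine-tuned checkpoint
# and apply the 0.9 cosine-similarity threshold, as app.py presumably does.
from sentence_transformers import SentenceTransformer, util

model_name = 'output/training_OnlineConstrativeLoss-2023-03-17_23-50-15'
similarity_threshold = 0.9

model = SentenceTransformer(model_name)

def is_match(text_a: str, text_b: str) -> bool:
    # Encode both strings and compare their cosine similarity to the threshold.
    emb_a, emb_b = model.encode([text_a, text_b], convert_to_tensor=True)
    return util.cos_sim(emb_a, emb_b).item() >= similarity_threshold

print(is_match("first example string", "a second example string"))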
quick_evaluate.py
CHANGED
@@ -13,6 +13,7 @@ model_name = 'output/training_OnlineConstrativeLoss-2023-03-10_11-17-15'
 model_name = 'output/training_OnlineConstrativeLoss-2023-03-11_00-24-35'
 model_name = 'output/training_OnlineConstrativeLoss-2023-03-11_01-00-19'
 model_name='output/training_OnlineConstrativeLoss-2023-03-17_16-10-39'
+model_name='output/training_OnlineConstrativeLoss-2023-03-17_23-15-52'
 model_sbert = SentenceTransformer(model_name)
 
 
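quick_evaluate.py stacks several model_name reassignments, so only the last one takes effect before model_sbert = SentenceTransformer(model_name) loads that checkpoint. A rough sketch of what a quick sanity check with the loaded model could look like; the sample pairs below are invented and not taken from the script:

# Hypothetical quick check: encode a few hand-picked pairs with the loaded
# model and print their cosine similarities next to the expected label.
from sentence_transformers import SentenceTransformer, util

model_name = 'output/training_OnlineConstrativeLoss-2023-03-17_23-15-52'
model_sbert = SentenceTransformer(model_name)

# (text_a, text_b, expected_match) -- made-up pairs for illustration only.
pairs = [
    ("the weather is nice today", "it is a lovely day outside", True),
    ("the weather is nice today", "the invoice is overdue", False),
]

for text_a, text_b, expected in pairs:
    emb = model_sbert.encode([text_a, text_b], convert_to_tensor=True)
    score = util.cos_sim(emb[0], emb[1]).item()
    print(f"{score:.3f}  expected_match={expected}  {text_a!r} vs {text_b!r}")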
train.py
CHANGED
@@ -24,10 +24,12 @@ logger = logging.getLogger(__name__)
 
 
 #As base model, we use DistilBERT-base that was pre-trained on NLI and STSb data
-
+model_name ='sentence-transformers/paraphrase-albert-base-v2'
+model_name = 'sentence-transformers/all-mpnet-base-v1'
+model = SentenceTransformer(model_name)
 num_epochs = 12
 # Smaller is generally better more accurate results.
-train_batch_size =
+train_batch_size = 10
 
 #As distance metric, we use cosine distance (cosine_distance = 1-cosine_similarity)
 distance_metric = losses.SiameseDistanceMetric.COSINE_DISTANCE
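In train.py the second model_name assignment wins, so training starts from sentence-transformers/all-mpnet-base-v1 with num_epochs = 12, train_batch_size = 10, and cosine distance as the metric. A sketch of how these settings are commonly wired into OnlineContrastiveLoss (which the output folder names suggest the script uses); the toy training pairs, margin, warmup_steps, and output_path below are assumptions, not values from the repo:

# Hypothetical sketch wiring the settings from train.py into a
# sentence-transformers training run with OnlineContrastiveLoss.
from sentence_transformers import SentenceTransformer, InputExample, losses
from torch.utils.data import DataLoader

model_name = 'sentence-transformers/all-mpnet-base-v1'  # last assignment in the diff wins
model = SentenceTransformer(model_name)

num_epochs = 12
train_batch_size = 10
distance_metric = losses.SiameseDistanceMetric.COSINE_DISTANCE

# Toy labeled pairs (1 = match, 0 = non-match); the real script builds these from its own data.
train_examples = [
    InputExample(texts=["first street", "1st street"], label=1),
    InputExample(texts=["first street", "second avenue"], label=0),
]
train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=train_batch_size)

# OnlineContrastiveLoss mines hard positives/negatives within each batch,
# pulling matching pairs together and pushing non-matches beyond the margin.
train_loss = losses.OnlineContrastiveLoss(model=model, distance_metric=distance_metric, margin=0.5)

model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    epochs=num_epochs,
    warmup_steps=100,                                            # assumed value
    output_path='output/training_OnlineContrastiveLoss-example', # assumed path
)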