---
language:
- en
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:4731012
- loss:MultipleNegativesRankingLoss
- loss:CachedMultipleNegativesRankingLoss
- loss:SoftmaxLoss
- loss:CosineSimilarityLoss
base_model: tasksource/ModernBERT-base-nli
widget:
- source_sentence: Christa McAuliffe taught social studies at Concord High School.
  sentences:
  - The Football League play-offs for the 1994 -- 95 season were held in May 1995
    , with the finals taking place at Wembley Stadium in London .. Football League
    play-offs. Football League play-offs. 1994 Football League play-offs. Wembley
    Stadium. Wembley Stadium ( 1923 ). London. London. The play-off semi-finals were
    played over two legs and were contested by the teams who finished in 2nd , 3rd
    , 4th and 5th place in the Football League First Division and Football League
    Second Division and the 3rd , 4th , 5th , and 6th placed teams in the Football
    League Third Division table .. Football League First Division. 1994–95 Football
    League First Division. Football League Second Division. 1994–95 Football League
    Second Division. Football League Third Division. 1994–95 Football League Third
    Division. The winners of the semi-finals progressed through to the finals , with
    the winner of these matches gaining promotion for the following season .. following
    season. 1995-96 in English football
  - Sir Alexander Mackenzie Elementary is a public elementary school in Vancouver
    , British Columbia part of School District 39 Vancouver .. Vancouver. Vancouver,
    British Columbia. British Columbia. British Columbia. School District 39 Vancouver.
    School District 39 Vancouver. elementary school. elementary school
  - 'Help Wanted -LRB- Hataraku Hito  : Hard Working People in Japan , Job Island  :
    Hard Working People in Europe -RRB- is a game that features a collection of various
    , Wii Remote-based minigames .. Wii. Wii. Wii Remote. Wii Remote. The game is
    developed and published by Hudson Soft and was released in Japan for Nintendo
    ''s Wii on November 27 , 2008 , in Europe on March 13 , 2009 , in Australia on
    March 27 , 2009 , and in North America on May 12 , 2009 .. Hudson Soft. Hudson
    Soft. Wii. Wii. Nintendo. Nintendo'
- source_sentence: The researchers asked children of different ages to use words to
    form semantic correspondence. For example, when children see the words eagle,
    bear and robin, they combine them best according to their meaning. The results
    showed that older participants were more likely to develop different types of
    false memory than younger participants. Because there are many forms of classification
    in their minds. For example, young children classify eagles and robins as birds,
    while older children classify eagles and bears as predators. Compared with children,
    they have a concept of predators in their minds.
  sentences:
  - Extractive Industries Transparency Initiative is an organization
  - Mason heard a pun
  - Older children are more likely to have false memories than younger ones conforms
    to the context.
- source_sentence: 'Version 0.5 is released today. The biggest change is that this
    version finally has upload progress.

    Download it here:

    Or go to for more information about this project.

    Changelog:

    * Refactored the authentication_controller

    * Put before_filter :authorize in ApplicationController (and using skip_before_filter
    in other controllers if necessary)

    * Using ''unless'' instead of ''if not''

    * Using find_by() instead of find(:first)

    * Upload progress (yay!)

    Forums |

    Admin'
  sentences:
  - This example wikipedia comment contains an insult.
  - 'This text is about: hardware update'
  - The example summary is factually consistent with the full article.
- source_sentence: 'Make sure to make it to the Brew House in Pella, IA tomorrow @
    3 to meet with @user supporters! #SemST'
  sentences:
  - This example is ANT.
  - This example is valid question.
  - This example is favor.
- source_sentence: Also at increased risk are those whose immune systems suppressed
    by medications or by diseases such as cancer, diabetes and AIDS.
  sentences:
  - In 1995, the last survey, those numbers were equal.
  - Also at increased risk are those with suppressed immune systems due to illness
    or medicines.
  - Singapore stocks close 0.54 pct higher
datasets:
- tomaarsen/natural-questions-hard-negatives
- tomaarsen/gooaq-hard-negatives
- bclavie/msmarco-500k-triplets
- sentence-transformers/gooaq
- sentence-transformers/natural-questions
- tasksource/merged-2l-nli
- tasksource/merged-3l-nli
- tasksource/zero-shot-label-nli
- MoritzLaurer/dataset_train_nli
- google-research-datasets/paws
- nyu-mll/glue
- mwong/fever-evidence-related
- tasksource/sts-companion
pipeline_tag: sentence-similarity
library_name: sentence-transformers
---

# SentenceTransformer based on tasksource/ModernBERT-base-nli

This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [tasksource/ModernBERT-base-nli](https://huggingface.co/tasksource/ModernBERT-base-nli) on the [tomaarsen/natural-questions-hard-negatives](https://huggingface.co/datasets/tomaarsen/natural-questions-hard-negatives), [tomaarsen/gooaq-hard-negatives](https://huggingface.co/datasets/tomaarsen/gooaq-hard-negatives), [bclavie/msmarco-500k-triplets](https://huggingface.co/datasets/bclavie/msmarco-500k-triplets), [sentence-transformers/gooaq](https://huggingface.co/datasets/sentence-transformers/gooaq), [sentence-transformers/natural-questions](https://huggingface.co/datasets/sentence-transformers/natural-questions), [merged-2l-nli](https://huggingface.co/datasets/tasksource/merged-2l-nli), [merged-3l-nli](https://huggingface.co/datasets/tasksource/merged-3l-nli), [zero-shot-label-nli](https://huggingface.co/datasets/tasksource/zero-shot-label-nli), [dataset_train_nli](https://huggingface.co/datasets/MoritzLaurer/dataset_train_nli), [paws/labeled_final](https://huggingface.co/datasets/paws), [glue/mrpc](https://huggingface.co/datasets/glue), [glue/qqp](https://huggingface.co/datasets/glue), [fever-evidence-related](https://huggingface.co/datasets/mwong/fever-evidence-related), [glue/stsb](https://huggingface.co/datasets/glue), sick/relatedness and [sts-companion](https://huggingface.co/datasets/tasksource/sts-companion) datasets. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.

## Model Details

### Model Description
- **Model Type:** Sentence Transformer
- **Base model:** [tasksource/ModernBERT-base-nli](https://huggingface.co/tasksource/ModernBERT-base-nli) <!-- at revision f07aa4a387ebb3f3b07d60086edff7646ae8b762 -->
- **Maximum Sequence Length:** 2048 tokens
- **Output Dimensionality:** 768 dimensions
- **Similarity Function:** Cosine Similarity
- **Training Datasets:**
    - [tomaarsen/natural-questions-hard-negatives](https://huggingface.co/datasets/tomaarsen/natural-questions-hard-negatives)
    - [tomaarsen/gooaq-hard-negatives](https://huggingface.co/datasets/tomaarsen/gooaq-hard-negatives)
    - [bclavie/msmarco-500k-triplets](https://huggingface.co/datasets/bclavie/msmarco-500k-triplets)
    - [sentence-transformers/gooaq](https://huggingface.co/datasets/sentence-transformers/gooaq)
    - [sentence-transformers/natural-questions](https://huggingface.co/datasets/sentence-transformers/natural-questions)
    - [merged-2l-nli](https://huggingface.co/datasets/tasksource/merged-2l-nli)
    - [merged-3l-nli](https://huggingface.co/datasets/tasksource/merged-3l-nli)
    - [zero-shot-label-nli](https://huggingface.co/datasets/tasksource/zero-shot-label-nli)
    - [dataset_train_nli](https://huggingface.co/datasets/MoritzLaurer/dataset_train_nli)
    - [paws/labeled_final](https://huggingface.co/datasets/paws)
    - [glue/mrpc](https://huggingface.co/datasets/glue)
    - [glue/qqp](https://huggingface.co/datasets/glue)
    - [fever-evidence-related](https://huggingface.co/datasets/mwong/fever-evidence-related)
    - [glue/stsb](https://huggingface.co/datasets/glue)
    - sick/relatedness
    - [sts-companion](https://huggingface.co/datasets/tasksource/sts-companion)
- **Language:** en
<!-- - **License:** Unknown -->

### Model Sources

- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)

### Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 2048, 'do_lower_case': False}) with Transformer model: ModernBertModel 
  (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
)
```
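
For reference, below is a minimal sketch of equivalent inference with plain `transformers`, mirroring the CLS-token pooling and cosine similarity configured above. This is an illustration rather than the canonical implementation, and the example sentences are placeholders:

```python
# Minimal sketch: raw-transformers inference matching the architecture above
# (CLS pooling + cosine similarity). The example sentences are placeholders.
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("tasksource/ModernBERT-base-embed")
model = AutoModel.from_pretrained("tasksource/ModernBERT-base-embed")

sentences = ["The weather is lovely today.", "It's so sunny outside!"]
inputs = tokenizer(sentences, padding=True, truncation=True, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# CLS pooling: take the hidden state of the first ([CLS]) token.
embeddings = outputs.last_hidden_state[:, 0]
# Cosine similarity, matching this model's configured similarity function.
print(F.cosine_similarity(embeddings[0], embeddings[1], dim=0))
```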

## Usage

### Direct Usage (Sentence Transformers)

First install the Sentence Transformers library:

```bash
pip install -U sentence-transformers
```

Then you can load this model and run inference.
```python
from sentence_transformers import SentenceTransformer

# Download from the 🤗 Hub
model = SentenceTransformer("tasksource/ModernBERT-base-embed")
# Run inference
sentences = [
    'Also at increased risk are those whose immune systems suppressed by medications or by diseases such as cancer, diabetes and AIDS.',
    'Also at increased risk are those with suppressed immune systems due to illness or medicines.',
    'In 1995, the last survey, those numbers were equal.',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# (3, 768)

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# torch.Size([3, 3])
```
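
Since semantic search is among the listed uses, here is a small sketch of ranking a corpus against a query with the same API; the query string is hypothetical and the corpus reuses the widget examples above:

```python
# Small sketch of semantic search with the same model; the query is
# illustrative and the corpus strings come from the widget examples above.
query_embeddings = model.encode([
    "Who is at increased risk of infection?",
])
corpus_embeddings = model.encode([
    "Also at increased risk are those with suppressed immune systems due to illness or medicines.",
    "Singapore stocks close 0.54 pct higher",
    "In 1995, the last survey, those numbers were equal.",
])
# model.similarity applies the configured cosine similarity.
scores = model.similarity(query_embeddings, corpus_embeddings)
print(scores)  # shape [1, 3]; the first corpus entry should score highest
```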

<!--
### Direct Usage (Transformers)

<details><summary>Click to see the direct usage in Transformers</summary>

</details>
-->

<!--
### Downstream Usage (Sentence Transformers)

You can finetune this model on your own dataset.

<details><summary>Click to expand</summary>

</details>
-->

<!--
### Out-of-Scope Use

*List how the model may foreseeably be misused and address what users ought not to do with the model.*
-->

<!--
## Bias, Risks and Limitations

*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
-->

<!--
### Recommendations

*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
-->

## Training Details

### Training Datasets

#### tomaarsen/natural-questions-hard-negatives

* Dataset: [tomaarsen/natural-questions-hard-negatives](https://huggingface.co/datasets/tomaarsen/natural-questions-hard-negatives) at [52dfa09](https://huggingface.co/datasets/tomaarsen/natural-questions-hard-negatives/tree/52dfa09a3d5d3f90e7e115c407ccebe30fe79764)
* Size: 96,658 training samples
* Columns: <code>query</code>, <code>answer</code>, <code>negative_1</code>, <code>negative_2</code>, <code>negative_3</code>, <code>negative_4</code>, and <code>negative_5</code>
* Approximate statistics based on the first 1000 samples:
  |         | query                                                                              | answer                                                                               | negative_1                                                                            | negative_2                                                                           | negative_3                                                                           | negative_4                                                                           | negative_5                                                                           |
  |:--------|:-----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|
  | type    | string                                                                             | string                                                                               | string                                                                                | string                                                                               | string                                                                               | string                                                                               | string                                                                               |
  | details | <ul><li>min: 10 tokens</li><li>mean: 12.52 tokens</li><li>max: 26 tokens</li></ul> | <ul><li>min: 17 tokens</li><li>mean: 137.85 tokens</li><li>max: 556 tokens</li></ul> | <ul><li>min: 23 tokens</li><li>mean: 144.11 tokens</li><li>max: 1035 tokens</li></ul> | <ul><li>min: 13 tokens</li><li>mean: 142.73 tokens</li><li>max: 832 tokens</li></ul> | <ul><li>min: 15 tokens</li><li>mean: 146.37 tokens</li><li>max: 649 tokens</li></ul> | <ul><li>min: 19 tokens</li><li>mean: 145.79 tokens</li><li>max: 549 tokens</li></ul> | <ul><li>min: 19 tokens</li><li>mean: 142.01 tokens</li><li>max: 574 tokens</li></ul> |
* Samples:
  | query | answer | negative_1 | negative_2 | negative_3 | negative_4 | negative_5 |
  |:------|:-------|:-----------|:-----------|:-----------|:-----------|:-----------|
  | <code>when did richmond last play in a preliminary final</code> | <code>Richmond Football Club Richmond began 2017 with 5 straight wins, a feat it had not achieved since 1995. A series of close losses hampered the Tigers throughout the middle of the season, including a 5-point loss to the Western Bulldogs, 2-point loss to Fremantle, and a 3-point loss to the Giants. Richmond ended the season strongly with convincing victories over Fremantle and St Kilda in the final two rounds, elevating the club to 3rd on the ladder. Richmond's first final of the season against the Cats at the MCG attracted a record qualifying final crowd of 95,028; the Tigers won by 51 points. Having advanced to the first preliminary finals for the first time since 2001, Richmond defeated Greater Western Sydney by 36 points in front of a crowd of 94,258 to progress to the Grand Final against Adelaide, their first Grand Final appearance since 1982. The attendance was 100,021, the largest crowd to a grand final since 1986. The Crows led at quarter time and led by as many as 13, but the Tig...</code> | <code>Brisbane Bears However, the club was still struggling off-field. One of the Bears' biggest problems was its lack of support (both on and off the field) in Melbourne, the location of most of its away matches. In mid-1996, the struggling Fitzroy Football Club collapsed due to financial pressures and was seeking to merge its assets with another club. When a merger with North Melbourne in forming the North Fitzroy Kangaroos failed to win the support of the other AFL clubs, a deal for a merger was done between Fitzroy and the Bears. The new team was known as the Brisbane Lions, based at the Gabba, with Northey as the coach of the merged club. As such, the history of the Brisbane Bears as an individual entity ended after the 1996 season, with ten seasons of competition and the third-place finish in 1996 as its best performance. The Bears last match as a separate entity was a preliminary final on Saturday 21 September 1996 at the Melbourne Cricket Ground (where the Bears played their first VF...</code> | <code>Virginia Tech–West Virginia football rivalry Virginia Tech held the trophy in six of the nine years in which it was contested, but West Virginia leads the all-time series 28–23–1. The last game was played on September 3, 2017 at FedEx Field in Landover, MD; Virginia Tech won 31–24.</code> | <code>Martin Truex Jr. To start off the Round of 12, Truex scored his 6th win of the season at Charlotte after leading 91 out of 334 laps to secure a spot for the Round of 8. Just two weeks later, he scored another win at Kansas despite having a restart violation early in the race.</code> | <code>Adelaide Football Club Star midfielder for many years Patrick Dangerfield left the club at the end of the 2015 season (a season in which he won the club's best and fairest) and Don Pyke, a former premiership player and assistant coach with West Coast who had also been an assistant coach at Adelaide from 2005 to 2006, was appointed Adelaide's senior coach for at least three years.[9] Adelaide was widely tipped to slide out of the finals in 2016[27][28][29] but the Crows proved to be one of the successes of the season, comfortably qualifying for a home elimination final and defeating North Melbourne by 62 points, before being eliminated the next week by eventual beaten grand finalists, Sydney in the semi-finals. The club had a dominant 2017 season, winning their opening six games and never falling below second place for the entire season. Adelaide claimed their second McClelland Trophy as minor premiers.[30] The Adelaide Crows entered the 2017 finals series as favourites for the premiers...</code> | <code>Battle of Appomattox Court House The Battle of Appomattox Court House (Virginia, U.S.), fought on the morning of April 9, 1865, was one of the last battles of the American Civil War (1861–1865). It was the final engagement of Confederate States Army General-in-Chief, Robert E. Lee, and his Army of Northern Virginia before it surrendered to the Union Army of the Potomac under the Commanding General of the United States, Ulysses S. Grant. Lee, having abandoned the Confederate capital of Richmond, Virginia, after the nine and one-half month Siege of Petersburg and Richmond, retreated west, hoping to join his army with the remaining Confederate forces in North Carolina, the Army of Tennessee under Gen. Joseph E. Johnston. Union infantry and cavalry forces under Gen. Philip Sheridan pursued and cut off the Confederates' retreat at the central Virginia village of Appomattox Court House. Lee launched a last-ditch attack to break through the Union forces to his front, assuming the Union forc...</code> |
  | <code>who sang what in the world's come over you</code> | <code>Jack Scott (singer) At the beginning of 1960, Scott again changed record labels, this time to Top Rank Records.[1] He then recorded four Billboard Hot 100 hits – "What in the World's Come Over You" (#5), "Burning Bridges" (#3) b/w "Oh Little One" (#34), and "It Only Happened Yesterday" (#38).[1] "What in the World's Come Over You" was Scott's second gold disc winner.[6] Scott continued to record and perform during the 1960s and 1970s.[1] His song "You're Just Gettin' Better" reached the country charts in 1974.[1] In May 1977, Scott recorded a Peel session for BBC Radio 1 disc jockey, John Peel.</code> | <code>Lover, You Should've Come Over "Lover, You Should've Come Over" is the seventh track on Jeff Buckley's album Grace. Inspired by the ending of the relationship between Buckley and Rebecca Moore,[1] it concerns the despondency of a young man growing older, finding that his actions represent a perspective he feels that he should have outgrown. Biographer and critic David Browne describes the lyrics as "confused and confusing" and the music as "a languid beauty."[1]</code> | <code>It's Christmas (All Over The World) "It's Christmas (All Over The World)" is a song recorded by Scottish singer Sheena Easton. It was released in November 1985 as the theme song from the soundtrack of Santa Claus: The Movie. The song was written by Bill House and John Hobbs.</code> | <code>The End of the World (Skeeter Davis song) "The End of the World" is a country pop song written by Arthur Kent and Sylvia Dee, for American singer Skeeter Davis. It had success in the 1960s and spawned many covers.</code> | <code>Israel Kamakawiwoʻole His voice became famous outside Hawaii when his album Facing Future was released in 1993. His medley of "Somewhere Over the Rainbow/What a Wonderful World" was released on his albums Ka ʻAnoʻi and Facing Future. It was subsequently featured in several films, television programs, and television commercials.</code> | <code>Make the World Go Away "Make the World Go Away'" is a country-popular music song composed by Hank Cochran. It has become a Top 40 popular success three times: for Timi Yuro (during 1963), for Eddy Arnold (1965), and for the brother-sister duo Donny and Marie Osmond (1975). The original version of the song was recorded by Ray Price during 1963. It has remained a country crooner standard ever since.</code> |
  | <code>who produces the most wool in the world</code> | <code>Wool Global wool production is about 2 million tonnes per year, of which 60% goes into apparel. Wool comprises ca 3% of the global textile market, but its value is higher owing to dying and other modifications of the material.[1] Australia is a leading producer of wool which is mostly from Merino sheep but has been eclipsed by China in terms of total weight.[30] New Zealand (2016) is the third-largest producer of wool, and the largest producer of crossbred wool. Breeds such as Lincoln, Romney, Drysdale, and Elliotdale produce coarser fibers, and wool from these sheep is usually used for making carpets.</code> | <code>Baa, Baa, Black Sheep As with many nursery rhymes, attempts have been made to find origins and meanings for the rhyme, most which have no corroborating evidence.[1] Katherine Elwes Thomas in The Real Personages of Mother Goose (1930) suggested that the rhyme referred to resentment at the heavy taxation on wool.[5] This has particularly been taken to refer to the medieval English "Great" or "Old Custom" wool tax of 1275, which survived until the fifteenth century.[1] More recently the rhyme has been connected to the slave trade, particularly in the southern United States.[6] This explanation was advanced during debates over political correctness and the use and reform of nursery rhymes in the 1980s, but has no supporting historical evidence.[7] Rather than being negative, the wool of black sheep may have been prized as it could be made into dark cloth without dyeing.[6]</code> | <code>Raymond Group Raymond Group is an Indian branded fabric and fashion retailer, incorporated in 1925. It produces suiting fabric, with a capacity of producing 31 million meters of wool and wool-blended fabrics. Gautam Singhania is the chairman and managing director of the Raymond group.[3]</code> | <code>Silk in the Indian subcontinent Silk in the Indian subcontinent is a luxury good. In India, about 97% of the raw mulberry silk is produced in the five Indian states of Karnataka, Andhra Pradesh, Tamil Nadu, West Bengal and Jammu and Kashmir.[1] Mysore and North Bangalore, the upcoming site of a US$20 million "Silk City", contribute to a majority of silk production.[2] Another emerging silk producer is Tamil Nadu where mulberry cultivation is concentrated in Salem, Erode and Dharmapuri districts. Hyderabad, Andhra Pradesh and Gobichettipalayam, Tamil Nadu were the first locations to have automated silk reeling units.[3] yoyo quantity:::</code> | <code>F. W. Woolworth Company The two Woolworth brothers pioneered and developed merchandising, direct purchasing, sales, and customer service practices commonly used today. Despite its growing to be one of the largest retail chains in the world through most of the 20th century, increased competition led to its decline beginning in the 1980s, while its sporting goods division grew. The chain went out of business in July 1997, when the company decided to focus primarily on sporting goods and renamed itself Venator Group. By 2001, the company focused exclusively on the sporting goods market, changing its name to the present Foot Locker, Inc., changing its ticker symbol from its familiar Z in 2003 to its present ticker (NYSE: FL).</code> | <code>Silk Silk's absorbency makes it comfortable to wear in warm weather and while active. Its low conductivity keeps warm air close to the skin during cold weather. It is often used for clothing such as shirts, ties, blouses, formal dresses, high fashion clothes, lining, lingerie, pajamas, robes, dress suits, sun dresses and Eastern folk costumes. For practical use, silk is excellent as clothing that protects from many biting insects that would ordinarily pierce clothing, such as mosquitoes and horseflies.</code> |
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 20.0,
      "similarity_fct": "cos_sim"
  }
  ```
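
For readers reproducing the setup, a minimal sketch of how these parameters map onto the sentence-transformers API; the trainer wiring around it is omitted:

```python
# Minimal sketch: instantiating this loss with the parameters listed above.
from sentence_transformers import SentenceTransformer, util
from sentence_transformers.losses import MultipleNegativesRankingLoss

model = SentenceTransformer("tasksource/ModernBERT-base-nli")  # base model per this card
loss = MultipleNegativesRankingLoss(model, scale=20.0, similarity_fct=util.cos_sim)
# `loss` is then passed to a SentenceTransformerTrainer together with the dataset.
```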

#### tomaarsen/gooaq-hard-negatives

* Dataset: [tomaarsen/gooaq-hard-negatives](https://huggingface.co/datasets/tomaarsen/gooaq-hard-negatives) at [87594a1](https://huggingface.co/datasets/tomaarsen/gooaq-hard-negatives/tree/87594a1e6c58e88b5843afa9da3a97ffd75d01c2)
* Size: 200,000 training samples
* Columns: <code>question</code>, <code>answer</code>, <code>negative_1</code>, <code>negative_2</code>, <code>negative_3</code>, <code>negative_4</code>, and <code>negative_5</code>
* Approximate statistics based on the first 1000 samples:
  |         | question                                                                          | answer                                                                              | negative_1                                                                          | negative_2                                                                          | negative_3                                                                          | negative_4                                                                          | negative_5                                                                          |
  |:--------|:----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                              | string                                                                              | string                                                                              | string                                                                              | string                                                                              | string                                                                              |
  | details | <ul><li>min: 8 tokens</li><li>mean: 11.99 tokens</li><li>max: 22 tokens</li></ul> | <ul><li>min: 15 tokens</li><li>mean: 57.82 tokens</li><li>max: 138 tokens</li></ul> | <ul><li>min: 14 tokens</li><li>mean: 57.42 tokens</li><li>max: 125 tokens</li></ul> | <ul><li>min: 14 tokens</li><li>mean: 56.84 tokens</li><li>max: 120 tokens</li></ul> | <ul><li>min: 14 tokens</li><li>mean: 57.08 tokens</li><li>max: 155 tokens</li></ul> | <ul><li>min: 15 tokens</li><li>mean: 57.54 tokens</li><li>max: 129 tokens</li></ul> | <ul><li>min: 14 tokens</li><li>mean: 58.23 tokens</li><li>max: 195 tokens</li></ul> |
* Samples:
  | question                                           | answer                                                                                                                                                                                                                                                                                                                                          | negative_1                                                                                                                                                                                                                                                        | negative_2                                                                                                                                                                                                                                 | negative_3                                                                                                                                                                                                    | negative_4                                                                                                                                                                                                                                                                                                   | negative_5                                                                                                                                                                                                                                                                                                                                 |
  |:---------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>is toprol xl the same as metoprolol?</code>  | <code>Metoprolol succinate is also known by the brand name Toprol XL. It is the extended-release form of metoprolol. Metoprolol succinate is approved to treat high blood pressure, chronic chest pain, and congestive heart failure.</code>                                                                                                    | <code>Secondly, metoprolol and metoprolol ER have different brand-name equivalents: Brand version of metoprolol: Lopressor. Brand version of metoprolol ER: Toprol XL.</code>                                                                                     | <code>Pill with imprint 1 is White, Round and has been identified as Metoprolol Tartrate 25 mg.</code>                                                                                                                                     | <code>Interactions between your drugs No interactions were found between Allergy Relief and metoprolol. This does not necessarily mean no interactions exist. Always consult your healthcare provider.</code> | <code>Metoprolol is a type of medication called a beta blocker. It works by relaxing blood vessels and slowing heart rate, which improves blood flow and lowers blood pressure. Metoprolol can also improve the likelihood of survival after a heart attack.</code>                                          | <code>Metoprolol starts to work after about 2 hours, but it can take up to 1 week to fully take effect. You may not feel any different when you take metoprolol, but this doesn't mean it's not working. It's important to keep taking your medicine.</code>                                                                               |
  | <code>are you experienced cd steve hoffman?</code> | <code>The Are You Experienced album was apparently mastered from the original stereo UK master tapes (according to Steve Hoffman - one of the very few who has heard both the master tapes and the CDs produced over the years). ... The CD booklets were a little sparse, but at least they stayed true to the album's original design.</code> | <code>I Saw the Light. Showcasing the unique talent and musical influence of country-western artist Hank Williams, this candid biography also sheds light on the legacy of drug abuse and tormented relationships that contributes to the singer's legend.</code> | <code>(Read our ranking of his top 10.) And while Howard dresses the part of director, any notion of him as a tortured auteur or dictatorial taskmasker — the clichés of the Hollywood director — are tossed aside. He's very nice.</code> | <code>He was a music star too. Where're you people born and brought up? We 're born and brought up here in Anambra State at Nkpor town, near Onitsha.</code>                                                  | <code>At the age of 87 he has now retired from his live shows and all the traveling involved. And although he still picks up his Martin Guitar and does a show now and then, his life is now devoted to writing his memoirs.</code>                                                                          | <code>The owner of the mysterious voice behind all these videos is a man who's seen a lot, visiting a total of 56 intimate celebrity spaces over the course of five years. His name is Joe Sabia — that's him in the photo — and he's currently the VP of creative development at Condé Nast Entertainment.</code>                         |
  | <code>how are babushka dolls made?</code>          | <code>Matryoshka dolls are made of wood from lime, balsa, alder, aspen, and birch trees; lime is probably the most common wood type. ... After cutting, the trees are stripped of most of their bark, although a few inner rings of bark are left to bind the wood and keep it from splitting.</code>                                           | <code>A quick scan of the auction and buy-it-now listings on eBay finds porcelain doll values ranging from around $5 and $10 to several thousand dollars or more but no dolls listed above $10,000.</code>                                                        | <code>Japanese dolls are called as ningyō in Japanese and literally translates to 'human form'.</code>                                                                                                                                     | <code>Matyoo: All Fresno Girl dolls come just as real children are born.</code>                                                                                                                               | <code>As of September 2016, there are over 100 characters. The main toy line includes 13-inch Dolls, the mini-series, and a variety of mini play-sets and plush dolls as well as Lalaloopsy Littles, smaller siblings of the 13-inch dolls. A spin-off known as "Lala-Oopsies" came out in late 2012.</code> | <code>LOL dolls are little baby dolls that come wrapped inside a surprise toy ball. Each ball has layers that contain stickers, secret messages, mix and match accessories–and finally–a doll. ... The doll on the ball is almost never the doll inside. Dolls are released in series, so not every doll is available all the time.</code> |
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 20.0,
      "similarity_fct": "cos_sim"
  }
  ```
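
For reference, this loss can be instantiated directly with the Sentence Transformers API. Below is a minimal sketch mirroring the parameters above; the base checkpoint named here is a placeholder for illustration, not the model this card describes.

```python
from sentence_transformers import SentenceTransformer, losses, util

# Placeholder checkpoint, used for illustration only.
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# MultipleNegativesRankingLoss scores each anchor against every positive in
# the batch (in-batch negatives), using cosine similarity scaled by 20.0,
# matching the "scale" and "similarity_fct" parameters listed above.
loss = losses.MultipleNegativesRankingLoss(
    model=model,
    scale=20.0,
    similarity_fct=util.cos_sim,
)
```

Because the other samples in a batch serve as negatives for each anchor, larger batch sizes generally make this loss more effective.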

#### bclavie/msmarco-500k-triplets

* Dataset: [bclavie/msmarco-500k-triplets](https://huggingface.co/datasets/bclavie/msmarco-500k-triplets) at [cb1a85c](https://huggingface.co/datasets/bclavie/msmarco-500k-triplets/tree/cb1a85c1261fa7c65f4ea43f94e50f8b467c372f)
* Size: 200,000 training samples
* Columns: <code>query</code>, <code>positive</code>, and <code>negative</code>
* Approximate statistics based on the first 1000 samples:
  |         | query                                                                            | positive                                                                            | negative                                                                            |
  |:--------|:---------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
  | type    | string                                                                           | string                                                                              | string                                                                              |
  | details | <ul><li>min: 4 tokens</li><li>mean: 9.31 tokens</li><li>max: 31 tokens</li></ul> | <ul><li>min: 20 tokens</li><li>mean: 82.19 tokens</li><li>max: 216 tokens</li></ul> | <ul><li>min: 18 tokens</li><li>mean: 78.99 tokens</li><li>max: 209 tokens</li></ul> |
* Samples:
  | query                                                                              | positive                                                                                                                                                                                                                                                                                                                 | negative                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                  |
  |:-----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>the most important factor that influences k+ secretion is __________.</code> | <code>The regulation of K+ distribution between the intracellular and extracellular space is referred to as internal K+ balance. The most important factors regulating this movement under normal conditions are insulin and catecholamines (1).</code>                                                                  | <code>They are both also important for secretion and flow of bile: 1  Cholecystokinin: The name of this hormone describes its effect on the biliary system-cholecysto = gallbladder and kinin = movement. 2  Secretin: This hormone is secreted in response to acid in the duodenum.</code>                                                                                                                                                                                                               |
  | <code>how much did the mackinac bridge cost to build</code>                        | <code>The cost to design the project was $3,500,000 (Steinman Company). The cost to construct the bridge was $70, 268,500. Two primary contractors were hired to build the bridge: American Bridge for superstructure - $44,532,900; and Merritt-Chapman and Scott of New York for the foundations - $25,735,600.</code> | <code>When your child needs a dental tooth bridge, you need to know the average cost so you can factor the price into your budget. Several factors affect the price of a bridge, which can run between $700 to $1,500 per tooth. If you have insurance or your child is covered by Medicaid, part of the cost may be covered.</code>                                                                                                                                                                      |
  | <code>when do concussion symptoms appear</code>                                    | <code>Then you can get advice on what to do next. For milder symptoms, the doctor may recommend rest and ask you to watch your child closely for changes, such as a headache that gets worse. Symptoms of a concussion don't always show up right away, and can develop within 24 to 72 hours after an injury.</code>    | <code>Concussion: A traumatic injury to soft tissue, usually the brain, as a result of a violent blow, shaking, or spinning. A brain concussion can cause immediate but temporary impairment of brain functions, such as thinking, vision, equilibrium, and consciousness. After a person has had a concussion, he or she is at increased risk for recurrence. Moreover, after a person has several concussions, less of a blow can cause injury, and the person can require more time to recover.</code> |
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 20.0,
      "similarity_fct": "cos_sim"
  }
  ```
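
To inspect the triplets themselves, the pinned revision can be loaded with the 🤗 Datasets library. A sketch, assuming a standard `train` split; note that per the size above, only 200,000 of the 500k triplets were used for training.

```python
from datasets import load_dataset

# Load the exact revision referenced above; the columns are
# "query", "positive", and "negative" as described in this card.
triplets = load_dataset(
    "bclavie/msmarco-500k-triplets",
    split="train",
    revision="cb1a85c1261fa7c65f4ea43f94e50f8b467c372f",
)
print(triplets[0]["query"])
```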

#### sentence-transformers/gooaq

* Dataset: [sentence-transformers/gooaq](https://huggingface.co/datasets/sentence-transformers/gooaq) at [b089f72](https://huggingface.co/datasets/sentence-transformers/gooaq/tree/b089f728748a068b7bc5234e5bcf5b25e3c8279c)
* Size: 200,000 training samples
* Columns: <code>question</code> and <code>answer</code>
* Approximate statistics based on the first 1000 samples:
  |         | question                                                                          | answer                                                                              |
  |:--------|:----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                              |
  | details | <ul><li>min: 8 tokens</li><li>mean: 12.19 tokens</li><li>max: 22 tokens</li></ul> | <ul><li>min: 13 tokens</li><li>mean: 58.34 tokens</li><li>max: 124 tokens</li></ul> |
* Samples:
  | question                                           | answer                                                                                                                                                                                                                                                                                                                                          |
  |:---------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>is toprol xl the same as metoprolol?</code>  | <code>Metoprolol succinate is also known by the brand name Toprol XL. It is the extended-release form of metoprolol. Metoprolol succinate is approved to treat high blood pressure, chronic chest pain, and congestive heart failure.</code>                                                                                                    |
  | <code>are you experienced cd steve hoffman?</code> | <code>The Are You Experienced album was apparently mastered from the original stereo UK master tapes (according to Steve Hoffman - one of the very few who has heard both the master tapes and the CDs produced over the years). ... The CD booklets were a little sparse, but at least they stayed true to the album's original design.</code> |
  | <code>how are babushka dolls made?</code>          | <code>Matryoshka dolls are made of wood from lime, balsa, alder, aspen, and birch trees; lime is probably the most common wood type. ... After cutting, the trees are stripped of most of their bark, although a few inner rings of bark are left to bind the wood and keep it from splitting.</code>                                           |
* Loss: [<code>CachedMultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedmultiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 20.0,
      "similarity_fct": "cos_sim"
  }
  ```
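
The cached variant behaves like `MultipleNegativesRankingLoss` but uses gradient caching (GradCache), so the effective batch, and therefore the pool of in-batch negatives, can grow well beyond what fits in GPU memory at once. A minimal sketch; `mini_batch_size` is an illustrative value not recorded in this card, and the checkpoint is a placeholder.

```python
from sentence_transformers import SentenceTransformer, losses, util

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")  # placeholder

# CachedMultipleNegativesRankingLoss processes the batch in mini-batches
# and caches embeddings and gradients, so the effective batch size (and
# with it the number of in-batch negatives) is decoupled from GPU memory.
# mini_batch_size below is an assumed example value.
loss = losses.CachedMultipleNegativesRankingLoss(
    model=model,
    scale=20.0,
    similarity_fct=util.cos_sim,
    mini_batch_size=32,
)
```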

#### sentence-transformers/natural-questions

* Dataset: [sentence-transformers/natural-questions](https://huggingface.co/datasets/sentence-transformers/natural-questions) at [f9e894e](https://huggingface.co/datasets/sentence-transformers/natural-questions/tree/f9e894e1081e206e577b4eaa9ee6de2b06ae6f17)
* Size: 100,231 training samples
* Columns: <code>query</code> and <code>answer</code>
* Approximate statistics based on the first 1000 samples:
  |         | query                                                                              | answer                                                                               |
  |:--------|:-----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|
  | type    | string                                                                             | string                                                                               |
  | details | <ul><li>min: 10 tokens</li><li>mean: 12.47 tokens</li><li>max: 23 tokens</li></ul> | <ul><li>min: 17 tokens</li><li>mean: 138.32 tokens</li><li>max: 556 tokens</li></ul> |
* Samples:
  | query                                                           | answer                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                   |
  |:----------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>when did richmond last play in a preliminary final</code> | <code>Richmond Football Club Richmond began 2017 with 5 straight wins, a feat it had not achieved since 1995. A series of close losses hampered the Tigers throughout the middle of the season, including a 5-point loss to the Western Bulldogs, 2-point loss to Fremantle, and a 3-point loss to the Giants. Richmond ended the season strongly with convincing victories over Fremantle and St Kilda in the final two rounds, elevating the club to 3rd on the ladder. Richmond's first final of the season against the Cats at the MCG attracted a record qualifying final crowd of 95,028; the Tigers won by 51 points. Having advanced to the first preliminary finals for the first time since 2001, Richmond defeated Greater Western Sydney by 36 points in front of a crowd of 94,258 to progress to the Grand Final against Adelaide, their first Grand Final appearance since 1982. The attendance was 100,021, the largest crowd to a grand final since 1986. The Crows led at quarter time and led by as many as 13, but the Tig...</code> |
  | <code>who sang what in the world's come over you</code>         | <code>Jack Scott (singer) At the beginning of 1960, Scott again changed record labels, this time to Top Rank Records.[1] He then recorded four Billboard Hot 100 hits – "What in the World's Come Over You" (#5), "Burning Bridges" (#3) b/w "Oh Little One" (#34), and "It Only Happened Yesterday" (#38).[1] "What in the World's Come Over You" was Scott's second gold disc winner.[6] Scott continued to record and perform during the 1960s and 1970s.[1] His song "You're Just Gettin' Better" reached the country charts in 1974.[1] In May 1977, Scott recorded a Peel session for BBC Radio 1 disc jockey, John Peel.</code>                                                                                                                                                                                                                                                                                                                                                                                                                   |
  | <code>who produces the most wool in the world</code>            | <code>Wool Global wool production is about 2 million tonnes per year, of which 60% goes into apparel. Wool comprises ca 3% of the global textile market, but its value is higher owing to dying and other modifications of the material.[1] Australia is a leading producer of wool which is mostly from Merino sheep but has been eclipsed by China in terms of total weight.[30] New Zealand (2016) is the third-largest producer of wool, and the largest producer of crossbred wool. Breeds such as Lincoln, Romney, Drysdale, and Elliotdale produce coarser fibers, and wool from these sheep is usually used for making carpets.</code>                                                                                                                                                                                                                                                                                                                                                                                                           |
* Loss: [<code>CachedMultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedmultiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 20.0,
      "similarity_fct": "cos_sim"
  }
  ```

#### merged-2l-nli

* Dataset: [merged-2l-nli](https://huggingface.co/datasets/tasksource/merged-2l-nli) at [af845c6](https://huggingface.co/datasets/tasksource/merged-2l-nli/tree/af845c6b78a8ac3ea294666c2e5132cf6d5f4af0)
* Size: 425,243 training samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                           | sentence2                                                                         | label                                           |
  |:--------|:------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                              | string                                                                            | int                                             |
  | details | <ul><li>min: 4 tokens</li><li>mean: 83.27 tokens</li><li>max: 1202 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 16.7 tokens</li><li>max: 126 tokens</li></ul> | <ul><li>0: ~52.90%</li><li>1: ~47.10%</li></ul> |
* Samples:
  | sentence1                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                | sentence2                                                                 | label          |
  |:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------|:---------------|
  | <code>In 1783 , the Sunni Al-Khalifa family captured Bahrain from the Persians .</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                  | <code>The is a geographical/political entity</code>                       | <code>0</code> |
  | <code>::stage Egg:: Newt eggs are encased in a gel-like substance rather than a hard shell. Adult females release eggs one at a time and store them in clusters ranging from a handful to several dozen in size. Adults often take an active role in defending their eggs after depositing them. Mothers may curl their body around the eggs to provide protection. Some newt species even wrap leaves around each egg individually to camouflage them, according to San Diego Zoo. Newt eggs are small: some measure only a millimeter or two in diameter. Mom usually anchors her eggs to underwater plants and other structures to keep them safe. ::stage Tadpole:: Newts that hatch from submerged eggs usually emerge as aquatic larvae with fishlike tails and gills that allow them to breathe beneath the water's surface. Not all newt species have an aquatic or 'tadpole' phase. This tadpole stage tends to be short, except in fully aquatic species. Eastern newt (Notophthalmus viridescens) larvae spend only a few months as...</code> | <code>Tadpole thing is a newt's terrestrial larval phase known as.</code> | <code>0</code> |
  | <code>Target <br><br>You are now a valid target, you nasty little shit! 86.176.169.49</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                             | <code>This example wikipedia comment contains an insult.</code>           | <code>1</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)
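
`SoftmaxLoss` is the classic SBERT classification objective: the two sentence embeddings u and v are concatenated with |u - v| and passed through a linear classifier over the labels. A sketch for this 2-label dataset (`num_labels` would be 3 for the 3-label NLI datasets below); the checkpoint is a placeholder.

```python
from sentence_transformers import SentenceTransformer, losses

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")  # placeholder

# SoftmaxLoss builds the feature vector [u, v, |u - v|] from a pair of
# sentence embeddings and feeds it to a linear classifier over the labels.
loss = losses.SoftmaxLoss(
    model=model,
    sentence_embedding_dimension=model.get_sentence_embedding_dimension(),
    num_labels=2,  # 2 for this dataset; 3 for the 3-label NLI datasets below
)
```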

#### merged-3l-nli

* Dataset: [merged-3l-nli](https://huggingface.co/datasets/tasksource/merged-3l-nli) at [e311b1f](https://huggingface.co/datasets/tasksource/merged-3l-nli/tree/e311b1f45a8f8cc8d4b2c5b92dbc797a05bc069d)
* Size: 564,204 training samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                            | sentence2                                                                          | label                                                              |
  |:--------|:-------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|:-------------------------------------------------------------------|
  | type    | string                                                                               | string                                                                             | int                                                                |
  | details | <ul><li>min: 5 tokens</li><li>mean: 110.76 tokens</li><li>max: 2048 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 28.37 tokens</li><li>max: 485 tokens</li></ul> | <ul><li>0: ~36.00%</li><li>1: ~32.70%</li><li>2: ~31.30%</li></ul> |
* Samples:
  | sentence1                                                                                                                    | sentence2                                                                                                                                                                                                                                                                     | label          |
  |:-----------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------|
  | <code>Iceland does not have a high latitude.</code>                                                                          | <code>Iceland . Iceland is warmed by the Gulf Stream and has a temperate climate , despite a high latitude just outside the Arctic Circle . Its high latitude and marine influence still keeps summers chilly , with most of the archipelago having a tundra climate .</code> | <code>2</code> |
  | <code>The populist, by contrast, panders to his audience, figuring out what it likes and then delivering it in heaps.</code> | <code>Populists hate the audience and antagonizes them; so their support, as a tyrant's, is similarly lacking.</code>                                                                                                                                                         | <code>2</code> |
  | <code>The prison sentence of that convict will end after 2 months.</code>                                                    | <code>Before 212 days, the prison sentence of that convict will end.</code>                                                                                                                                                                                                   | <code>1</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### zero-shot-label-nli

* Dataset: [zero-shot-label-nli](https://huggingface.co/datasets/tasksource/zero-shot-label-nli) at [b363c89](https://huggingface.co/datasets/tasksource/zero-shot-label-nli/tree/b363c895cd4b15b814b9dbd7e4466cd301c96b2a)
* Size: 1,090,333 training samples
* Columns: <code>label</code>, <code>sentence1</code>, and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | label                                           | sentence1                                                                           | sentence2                                                                        |
  |:--------|:------------------------------------------------|:------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|
  | type    | int                                             | string                                                                              | string                                                                           |
  | details | <ul><li>0: ~50.20%</li><li>2: ~49.80%</li></ul> | <ul><li>min: 3 tokens</li><li>mean: 66.27 tokens</li><li>max: 2048 tokens</li></ul> | <ul><li>min: 7 tokens</li><li>mean: 8.07 tokens</li><li>max: 17 tokens</li></ul> |
* Samples:
  | label          | sentence1                                                                                                                                                                    | sentence2                                |
  |:---------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------|
  | <code>0</code> | <code>The LAY MAN! Just to let you know you are missed and thought off. Do have a great day. And if you can send me bimbo and ugo's numbers, ill appreciate. Safe<br></code> | <code>This example is ham.</code>        |
  | <code>2</code> | <code>Crisp: oh really!!!!</code>                                                                                                                                            | <code>This example is Automotive.</code> |
  | <code>2</code> | <code>Insurance policies should be simple .</code>                                                                                                                           | <code>This example is negative.</code>   |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### dataset_train_nli

* Dataset: [dataset_train_nli](https://huggingface.co/datasets/MoritzLaurer/dataset_train_nli) at [1e00964](https://huggingface.co/datasets/MoritzLaurer/dataset_train_nli/tree/1e009645b2943106614107b06107b1ee85ac1161)
* Size: 1,018,733 training samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                           | sentence2                                                                         | label                                           |
  |:--------|:------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                              | string                                                                            | int                                             |
  | details | <ul><li>min: 4 tokens</li><li>mean: 95.56 tokens</li><li>max: 1152 tokens</li></ul> | <ul><li>min: 8 tokens</li><li>mean: 14.05 tokens</li><li>max: 38 tokens</li></ul> | <ul><li>0: ~50.60%</li><li>1: ~49.40%</li></ul> |
* Samples:
  | sentence1                                                                                                                                                                                                                                                                   | sentence2                                                           | label          |
  |:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------|:---------------|
  | <code>where is nayagara falls located</code>                                                                                                                                                                                                                                | <code>The example utterance is a query about music.</code>          | <code>1</code> |
  | <code>Druyun gets nine-month prison sentence A former top Air Force acquisition executive today was sentenced to nine months in prison for conspiring to help Boeing Co. win a multibillion-dollar Pentagon contract.</code>                                                | <code>This example news text is about world news</code>             | <code>1</code> |
  | <code>Writing on the  #39;wall #39; n Last edition of the Far Eastern Economic Review is shown on the streets of Hong Kong. The weekly news magazine is to fold in its current form with the loss of 80 jobs, the magazine #39;s publisher Dow Jones said yesterday.</code> | <code>This example news text is about science and technology</code> | <code>1</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### paws/labeled_final

* Dataset: [paws/labeled_final](https://huggingface.co/datasets/paws) at [161ece9](https://huggingface.co/datasets/paws/tree/161ece9501cf0a11f3e48bd356eaa82de46d6a09)
* Size: 49,401 training samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                          | sentence2                                                                         | label                                           |
  |:--------|:-----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                             | string                                                                            | int                                             |
  | details | <ul><li>min: 10 tokens</li><li>mean: 27.44 tokens</li><li>max: 51 tokens</li></ul> | <ul><li>min: 8 tokens</li><li>mean: 27.44 tokens</li><li>max: 51 tokens</li></ul> | <ul><li>0: ~55.60%</li><li>1: ~44.40%</li></ul> |
* Samples:
  | sentence1                                                                                                                                                                   | sentence2                                                                                                                                                                           | label          |
  |:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------|
  | <code>In Paris , in October 1560 , he secretly met the English ambassador , Nicolas Throckmorton , asking him for a passport to return to England through Scotland .</code> | <code>In October 1560 , he secretly met with the English ambassador , Nicolas Throckmorton , in Paris , and asked him for a passport to return to Scotland through England .</code> | <code>0</code> |
  | <code>The NBA season of 1975 -- 76 was the 30th season of the National Basketball Association .</code>                                                                      | <code>The 1975 -- 76 season of the National Basketball Association was the 30th season of the NBA .</code>                                                                          | <code>1</code> |
  | <code>There are also specific discussions , public profile debates and project discussions .</code>                                                                         | <code>There are also public discussions , profile specific discussions , and project discussions .</code>                                                                           | <code>0</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### glue/mrpc

* Dataset: [glue/mrpc](https://huggingface.co/datasets/glue) at [bcdcba7](https://huggingface.co/datasets/glue/tree/bcdcba79d07bc864c1c254ccfcedcce55bcc9a8c)
* Size: 3,668 training samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                          | sentence2                                                                          | label                                           |
  |:--------|:-----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                             | string                                                                             | int                                             |
  | details | <ul><li>min: 10 tokens</li><li>mean: 27.55 tokens</li><li>max: 49 tokens</li></ul> | <ul><li>min: 12 tokens</li><li>mean: 27.25 tokens</li><li>max: 48 tokens</li></ul> | <ul><li>0: ~33.70%</li><li>1: ~66.30%</li></ul> |
* Samples:
  | sentence1                                                                                                              | sentence2                                                                                                                        | label          |
  |:-----------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------|:---------------|
  | <code>Amrozi accused his brother , whom he called " the witness " , of deliberately distorting his evidence .</code>   | <code>Referring to him as only " the witness " , Amrozi accused his brother of deliberately distorting his evidence .</code>     | <code>1</code> |
  | <code>Yucaipa owned Dominick 's before selling the chain to Safeway in 1998 for $ 2.5 billion .</code>                 | <code>Yucaipa bought Dominick 's in 1995 for $ 693 million and sold it to Safeway for $ 1.8 billion in 1998 .</code>             | <code>0</code> |
  | <code>They had published an advertisement on the Internet on June 10 , offering the cargo for sale , he added .</code> | <code>On June 10 , the ship 's owners had published an advertisement on the Internet , offering the explosives for sale .</code> | <code>1</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### glue/qqp

* Dataset: [glue/qqp](https://huggingface.co/datasets/glue) at [bcdcba7](https://huggingface.co/datasets/glue/tree/bcdcba79d07bc864c1c254ccfcedcce55bcc9a8c)
* Size: 363,846 training samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                         | label                                           |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                            | string                                                                            | int                                             |
  | details | <ul><li>min: 4 tokens</li><li>mean: 15.63 tokens</li><li>max: 50 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 15.55 tokens</li><li>max: 76 tokens</li></ul> | <ul><li>0: ~62.70%</li><li>1: ~37.30%</li></ul> |
* Samples:
  | sentence1                                                                            | sentence2                                                                                   | label          |
  |:-------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------|:---------------|
  | <code>How can I stop my laptop from hibernating in windows 10?</code>                | <code>How do I shutdown windows 10 instead of hibernating it?</code>                        | <code>0</code> |
  | <code>Is it worth the cost if ever I fix my gap teeth?</code>                        | <code>Is it worth it to fix teeth gap?</code>                                               | <code>1</code> |
  | <code>Why is USA the biggest threat to the global economy and Germany is not?</code> | <code>What is the biggest threat to the global economy over the next year (in 2011)?</code> | <code>0</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### fever-evidence-related

* Dataset: [fever-evidence-related](https://huggingface.co/datasets/mwong/fever-evidence-related) at [14aba00](https://huggingface.co/datasets/mwong/fever-evidence-related/tree/14aba009b5fcd97b1a9ee6f3e3b0da0e308cf7cb)
* Size: 403,218 training samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                             | label                                           |
  |:--------|:----------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                            | string                                                                                | int                                             |
  | details | <ul><li>min: 7 tokens</li><li>mean: 13.58 tokens</li><li>max: 59 tokens</li></ul> | <ul><li>min: 33 tokens</li><li>mean: 344.03 tokens</li><li>max: 2048 tokens</li></ul> | <ul><li>0: ~32.00%</li><li>1: ~68.00%</li></ul> |
* Samples:
  | sentence1                                                                 | sentence2                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                             | label          |
  |:--------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------|
  | <code>The Last of Us Part II had the developer Naughty Dog.</code>        | <code>Bishop Asbury Cottage is a 17th-century cottage on Newton Road , Great Barr , England , known for being the boyhood home of Francis Asbury -LRB- 1745 -- 1816 -RRB- , one of the first two bishops of the Methodist Episcopal Church -LRB- now The United Methodist Church -RRB- in the United States .. Cottage. Cottage. Great Barr. Great Barr. England. England. Francis Asbury. Francis Asbury. bishops. Bishop ( Methodism ). Methodist Episcopal Church. Methodist Episcopal Church. The United Methodist Church. The United Methodist Church. It is now a museum in his memory .</code> | <code>1</code> |
  | <code>Boomerang (1992 film) was released on July.</code>                  | <code>Petr Alekseyevich Bezobrazov -LRB- 29 January 1845 -- 17 July 1906 -RRB- was an admiral in the Imperial Russian Navy .. Imperial Russian Navy. Imperial Russian Navy</code>                                                                                                                                                                                                                                                                                                                                                                                                                     | <code>1</code> |
  | <code>G-Dragon was the first Korean solo artist to a type of tour.</code> | <code>The Scott Viking 2 was the first British high performance two seat sailplane , flying a few days before the outbreak of World War II .. World War II. World War II. Only one was built ; it was used in radar station trials in the Summer of 1940 .</code>                                                                                                                                                                                                                                                                                                                                     | <code>1</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### glue/stsb

* Dataset: [glue/stsb](https://huggingface.co/datasets/glue) at [bcdcba7](https://huggingface.co/datasets/glue/tree/bcdcba79d07bc864c1c254ccfcedcce55bcc9a8c)
* Size: 5,749 training samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                        | sentence2                                                                         | label                                                          |
  |:--------|:---------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------|
  | type    | string                                                                           | string                                                                            | float                                                          |
  | details | <ul><li>min: 6 tokens</li><li>mean: 15.0 tokens</li><li>max: 48 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 15.02 tokens</li><li>max: 51 tokens</li></ul> | <ul><li>min: 0.0</li><li>mean: 2.73</li><li>max: 5.0</li></ul> |
* Samples:
  | sentence1                                                                 | sentence2                                                               | label                          |
  |:--------------------------------------------------------------------------|:------------------------------------------------------------------------|:-------------------------------|
  | <code>Syria peace plan conditions “unacceptable,” opposition says</code>  | <code>Syria peace dashed as deadline passes</code>                      | <code>2.0</code>               |
  | <code>Romney picks Ryan as vice presidential running mate: source</code>  | <code>Romney to tap Ryan as vice presidential running mate</code>       | <code>5.0</code>               |
  | <code>Death toll rises to 6 as Storm Xaver batters northern Europe</code> | <code>Storm death toll rises as wind, rain batters north. Europe</code> | <code>3.200000047683716</code> |
* Loss: [<code>CosineSimilarityLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosinesimilarityloss) with these parameters:
  ```json
  {
      "loss_fct": "torch.nn.modules.loss.MSELoss"
  }
  ```
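
`CosineSimilarityLoss` regresses the cosine similarity of the two embeddings onto the gold score using the MSE criterion listed above. A sketch with a placeholder checkpoint; note that STS-B labels run from 0 to 5, and it is common (though not stated in this card) to rescale them to [0, 1] before training.

```python
import torch

from sentence_transformers import SentenceTransformer, losses

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")  # placeholder

# The cosine similarity of each sentence pair is compared against the
# gold score with MSE, matching the "loss_fct" parameter above.
loss = losses.CosineSimilarityLoss(
    model=model,
    loss_fct=torch.nn.MSELoss(),
)
```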

#### sick/relatedness

* Dataset: sick/relatedness
* Size: 4,439 training samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                         | label                                                         |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:--------------------------------------------------------------|
  | type    | string                                                                            | string                                                                            | float                                                         |
  | details | <ul><li>min: 6 tokens</li><li>mean: 12.08 tokens</li><li>max: 30 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 11.86 tokens</li><li>max: 23 tokens</li></ul> | <ul><li>min: 1.0</li><li>mean: 3.5</li><li>max: 5.0</li></ul> |
* Samples:
  | sentence1                                                                                  | sentence2                                                                                                  | label                           |
  |:-------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------|:--------------------------------|
  | <code>The man is standing on a rocky mountain and gray clouds are in the background</code> | <code>A black topless person is packing a pile of rocks and a front of clouds are in the background</code> | <code>2.9000000953674316</code> |
  | <code>A man is standing on a dirt hill next to a black jeep</code>                         | <code>A man in a hat is standing outside of a green vehicle</code>                                         | <code>2.5999999046325684</code> |
  | <code>A man is talking on a cell phone</code>                                              | <code>A man is making a phone call</code>                                                                  | <code>4.300000190734863</code>  |
* Loss: [<code>CosineSimilarityLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosinesimilarityloss) with these parameters:
  ```json
  {
      "loss_fct": "torch.nn.modules.loss.MSELoss"
  }
  ```

#### sts-companion

* Dataset: [sts-companion](https://huggingface.co/datasets/tasksource/sts-companion) at [fd8beff](https://huggingface.co/datasets/tasksource/sts-companion/tree/fd8beffb788df5f6673bc688e6dcbe3690a3acc6)
* Size: 5,289 training samples
* Columns: <code>label</code>, <code>sentence1</code>, and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | label                                                          | sentence1                                                                         | sentence2                                                                         |
  |:--------|:---------------------------------------------------------------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
  | type    | float                                                          | string                                                                            | string                                                                            |
  | details | <ul><li>min: 0.0</li><li>mean: 3.02</li><li>max: 5.0</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 17.69 tokens</li><li>max: 60 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 16.26 tokens</li><li>max: 51 tokens</li></ul> |
* Samples:
  | label             | sentence1                                                                                                                    | sentence2                                                                                                                           |
  |:------------------|:-----------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------|
  | <code>4.25</code> | <code>It then appointed a task force to formulate the necessary changes in tax and spending policies.</code>                 | <code>He has appointed a working party to make the necessary changes to the policies of public spending and fiscal policies.</code> |
  | <code>4.25</code> | <code>festive social event, celebration</code>                                                                               | <code>an occasion on which people can assemble for social interaction and entertainment.</code>                                     |
  | <code>3.6</code>  | <code>Who'd have thought an American hero could be a Canadian? NYT: Man Who Sheltered Americans in Tehran, Dies at 88</code> | <code>John Sheardown, Canadian Who Sheltered Americans in Tehran, Dies at 88</code>                                                 |
* Loss: [<code>CosineSimilarityLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosinesimilarityloss) with these parameters:
  ```json
  {
      "loss_fct": "torch.nn.modules.loss.MSELoss"
  }
  ```
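
Training on many datasets with different losses, as above, maps naturally onto the Sentence Transformers v3 trainer, which accepts parallel dictionaries of datasets and losses. A sketch under that assumption, using two small public datasets; `sentence-transformers/stsb` stands in for glue/stsb here since its scores are already normalized to [0, 1], and the checkpoint is again a placeholder.

```python
from datasets import load_dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    losses,
)

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")  # placeholder

# Two stand-ins for the training datasets described above.
gooaq = load_dataset("sentence-transformers/gooaq", split="train")
stsb = load_dataset("sentence-transformers/stsb", split="train")

# The trainer takes parallel dicts mapping dataset names to datasets and
# to the loss applied to each; every batch is drawn from a single dataset
# and routed to the matching loss.
trainer = SentenceTransformerTrainer(
    model=model,
    train_dataset={"gooaq": gooaq, "stsb": stsb},
    loss={
        "gooaq": losses.CachedMultipleNegativesRankingLoss(model),
        "stsb": losses.CosineSimilarityLoss(model),
    },
)
trainer.train()
```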

### Evaluation Datasets

#### merged-2l-nli

* Dataset: [merged-2l-nli](https://huggingface.co/datasets/tasksource/merged-2l-nli) at [af845c6](https://huggingface.co/datasets/tasksource/merged-2l-nli/tree/af845c6b78a8ac3ea294666c2e5132cf6d5f4af0)
* Size: 4,053 evaluation samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                           | sentence2                                                                         | label                                           |
  |:--------|:------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                              | string                                                                            | int                                             |
  | details | <ul><li>min: 6 tokens</li><li>mean: 75.82 tokens</li><li>max: 1219 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 13.5 tokens</li><li>max: 158 tokens</li></ul> | <ul><li>0: ~51.00%</li><li>1: ~49.00%</li></ul> |
* Samples:
  | sentence1                                                                                 | sentence2                                                                                                                                                                                                            | label          |
  |:------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------|
  | <code>What happens to the norm when a number is multiplied by p?</code>                   | <code>While completing Q (roughly, filling the gaps) with respect to the absolute value yields the field of real numbers, completing with respect to the p-adic norm |−|p yields the field of p-adic numbers.</code> | <code>0</code> |
  | <code>The abode of the Greek gods was on the summit of Mount Olympus, in Thessaly.</code> | <code>Mount Olympus is in Thessaly.</code>                                                                                                                                                                           | <code>1</code> |
  | <code>The drain is clogged with hair. It has to be cleaned.</code>                        | <code>The hair has to be cleaned.</code>                                                                                                                                                                             | <code>0</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)
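
`SoftmaxLoss` follows the original Sentence-BERT NLI recipe: a linear classifier over the concatenation of the two sentence embeddings and their absolute difference. A minimal sketch for the two-label case, again with a placeholder checkpoint:

```python
from sentence_transformers import SentenceTransformer, losses

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")  # placeholder checkpoint
# Classifier over (u, v, |u - v|); num_labels matches this dataset's label set {0, 1}.
loss = losses.SoftmaxLoss(
    model,
    sentence_embedding_dimension=model.get_sentence_embedding_dimension(),
    num_labels=2,
)
```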

#### merged-3l-nli

* Dataset: [merged-3l-nli](https://huggingface.co/datasets/tasksource/merged-3l-nli) at [e311b1f](https://huggingface.co/datasets/tasksource/merged-3l-nli/tree/e311b1f45a8f8cc8d4b2c5b92dbc797a05bc069d)
* Size: 2,872 evaluation samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                            | sentence2                                                                         | label                                                              |
  |:--------|:-------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:-------------------------------------------------------------------|
  | type    | string                                                                               | string                                                                            | int                                                                |
  | details | <ul><li>min: 6 tokens</li><li>mean: 167.67 tokens</li><li>max: 2048 tokens</li></ul> | <ul><li>min: 2 tokens</li><li>mean: 23.6 tokens</li><li>max: 430 tokens</li></ul> | <ul><li>0: ~38.20%</li><li>1: ~31.30%</li><li>2: ~30.50%</li></ul> |
* Samples:
  | sentence1                                                                                                                                                                                                                                                                                                                                                    | sentence2                                                                                | label          |
  |:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------|:---------------|
  | <code>But if Congress opts for debt over taxation, you can count on thoughtless commentators to denounce the interest payments on that debt as a second, and separate, outrage.</code>                                                                                                                                                                       | <code>Everybody considers the interest on the national debt an outrage.</code>           | <code>1</code> |
  | <code>The 1997 KNVB Cup Final was a football match between Roda JC and Heerenveen on 8 May 1997 at De Kuip, Rotterdam. It was the final match of the 1996–97 KNVB Cup competition and the 79th KNVB Cup final. Roda won 4–2 after goals from Gerald Sibon, Ger Senden, Eric van der Luer and Maarten Schops. It was the side's first KNVB Cup trophy.</code> | <code>Roda JC kept the Cup trophy at their headquarters.</code>                          | <code>1</code> |
  | <code>Discover Financial Services, Inc. is an American financial services company, which issues the Discover Card and operates the Discover and Pulse networks, and owns Diners Club International. Discover Card is the third largest credit card brand in the United States, when measured by cards in force, with nearly 50 million cardholders.</code>   | <code>Discover Card is a way to build credit for less than 50 million cardholders</code> | <code>0</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### zero-shot-label-nli

* Dataset: [zero-shot-label-nli](https://huggingface.co/datasets/tasksource/zero-shot-label-nli) at [b363c89](https://huggingface.co/datasets/tasksource/zero-shot-label-nli/tree/b363c895cd4b15b814b9dbd7e4466cd301c96b2a)
* Size: 14,419 evaluation samples
* Columns: <code>label</code>, <code>sentence1</code>, and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | label                                           | sentence1                                                                           | sentence2                                                                        |
  |:--------|:------------------------------------------------|:------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|
  | type    | int                                             | string                                                                              | string                                                                           |
  | details | <ul><li>0: ~51.40%</li><li>2: ~48.60%</li></ul> | <ul><li>min: 3 tokens</li><li>mean: 72.92 tokens</li><li>max: 2048 tokens</li></ul> | <ul><li>min: 7 tokens</li><li>mean: 8.01 tokens</li><li>max: 23 tokens</li></ul> |
* Samples:
  | label          | sentence1                                                                                                                                                                                                                                                                          | sentence2                                   |
  |:---------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------|
  | <code>2</code> | <code>Police suspected that Shaichat , 20 , had been abducted either by Palestinians or by Israeli Arabs .<br>Nobody claimed responsibility for Schaichat 's death , but police suspect that the 20-year-old soldier was abducted either by Palestinians or Israeli Arabs .</code> | <code>This example is equivalent.</code>    |
  | <code>2</code> | <code>Can immorality be achieved by blocking death genes?<br>Can immortality be achieved by blocking death genes?</code>                                                                                                                                                           | <code>This example is not_duplicate.</code> |
  | <code>2</code> | <code>can a minor sit at a bar in nj</code>                                                                                                                                                                                                                                        | <code>This example is False.</code>         |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)
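
The samples above show how zero-shot-label-nli recasts arbitrary classification tasks as entailment: each candidate label is verbalized into a hypothesis such as "This example is False." A sketch of forming such pairs (the template string is inferred from the samples, not taken from the dataset's own code):

```python
def to_nli_pairs(text: str, candidate_labels: list[str]) -> list[tuple[str, str]]:
    # One (premise, hypothesis) pair per candidate label; at inference time the
    # model scores each pair and the most entailed label wins.
    return [(text, f"This example is {label}.") for label in candidate_labels]

pairs = to_nli_pairs("can a minor sit at a bar in nj", ["True", "False"])
```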

#### dataset_train_nli

* Dataset: [dataset_train_nli](https://huggingface.co/datasets/MoritzLaurer/dataset_train_nli) at [1e00964](https://huggingface.co/datasets/MoritzLaurer/dataset_train_nli/tree/1e009645b2943106614107b06107b1ee85ac1161)
* Size: 1,018,733 evaluation samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                           | sentence2                                                                         | label                                           |
  |:--------|:------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                              | string                                                                            | int                                             |
  | details | <ul><li>min: 4 tokens</li><li>mean: 96.94 tokens</li><li>max: 1020 tokens</li></ul> | <ul><li>min: 8 tokens</li><li>mean: 13.86 tokens</li><li>max: 38 tokens</li></ul> | <ul><li>0: ~52.50%</li><li>1: ~47.50%</li></ul> |
* Samples:
  | sentence1                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                       | sentence2                                                                           | label          |
  |:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:---------------|
  | <code>Ecoplug MAX®<br>ECOPLUG MAX® is an efficient method to prevent regroth from leaf trees.<br>- Provides 100 percent effective on all brushwood<br>- Can be used all year round<br>- Kills all unwanted leaf tree<br>- Minimizes chemical diffusion<br>- Kills the entire root system of the treated tree/stump<br>- Fully selective method<br>reduce chemical use up to 90% compared to previously used methods.<br>- Can be used all year around.<br>- Will exterminate: Alder, elm, aspen, birch, beech, lime, maple, mountain ash,sallow, poplar, ash, cherry, bird cherry, oak and more broad leafed trees<br>- Minimize the use of chemicals during treatment of trees and stumps.<br>- The product will kill off the entire root system, but only the root system. Neither people, animals or the enviromnent will be exposed to our product..</code> | <code>This text is about: root extermination</code>                                 | <code>0</code> |
  | <code>can you start f. m. eight hundred and ninety radio channel</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                         | <code>The intent of this example utterance is a datetime query.</code>              | <code>1</code> |
  | <code>never again swings between false sentiment and unfunny madcap comedy and , along the way , expects the audience to invest in the central relationship as some kind of marriage of true minds .</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                     | <code>The sentiment in this example rotten tomatoes movie review is negative</code> | <code>0</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### paws/labeled_final

* Dataset: [paws/labeled_final](https://huggingface.co/datasets/paws) at [161ece9](https://huggingface.co/datasets/paws/tree/161ece9501cf0a11f3e48bd356eaa82de46d6a09)
* Size: 8,000 evaluation samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                         | label                                           |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                            | string                                                                            | int                                             |
  | details | <ul><li>min: 9 tokens</li><li>mean: 27.86 tokens</li><li>max: 51 tokens</li></ul> | <ul><li>min: 9 tokens</li><li>mean: 27.83 tokens</li><li>max: 52 tokens</li></ul> | <ul><li>0: ~54.90%</li><li>1: ~45.10%</li></ul> |
* Samples:
  | sentence1                                                                                                                                                      | sentence2                                                                                                                                                           | label          |
  |:---------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------|
  | <code>Bradd Crellin represented BARLA Cumbria on a tour of Australia with 6 other players representing Britain , also on a tour of Australia .</code>          | <code>Bradd Crellin also represented BARLA Great Britain on a tour through Australia on a tour through Australia with 6 other players representing Cumbria .</code> | <code>0</code> |
  | <code>They were there to enjoy us and they were there to pray for us .</code>                                                                                  | <code>They were there for us to enjoy and they were there for us to pray .</code>                                                                                   | <code>1</code> |
  | <code>After the end of the war in June 1902 , Higgins left Southampton in the `` SSBavarian '' in August , returning to Cape Town the following month .</code> | <code>In August , after the end of the war in June 1902 , Higgins Southampton left the `` SSBavarian '' and returned to Cape Town the following month .</code>      | <code>1</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### glue/mrpc

* Dataset: [glue/mrpc](https://huggingface.co/datasets/glue) at [bcdcba7](https://huggingface.co/datasets/glue/tree/bcdcba79d07bc864c1c254ccfcedcce55bcc9a8c)
* Size: 408 evaluation samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 408 samples:
  |         | sentence1                                                                          | sentence2                                                                          | label                                           |
  |:--------|:-----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                             | string                                                                             | int                                             |
  | details | <ul><li>min: 14 tokens</li><li>mean: 27.92 tokens</li><li>max: 46 tokens</li></ul> | <ul><li>min: 11 tokens</li><li>mean: 27.24 tokens</li><li>max: 43 tokens</li></ul> | <ul><li>0: ~31.62%</li><li>1: ~68.38%</li></ul> |
* Samples:
  | sentence1                                                                                                                             | sentence2                                                                                                                                          | label          |
  |:--------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------|:---------------|
  | <code>He said the foodservice pie business doesn 't fit the company 's long-term growth strategy .</code>                             | <code>" The foodservice pie business does not fit our long-term growth strategy .</code>                                                           | <code>1</code> |
  | <code>Magnarelli said Racicot hated the Iraqi regime and looked forward to using his long years of training in the war .</code>       | <code>His wife said he was " 100 percent behind George Bush " and looked forward to using his years of training in the war .</code>                | <code>0</code> |
  | <code>The dollar was at 116.92 yen against the yen , flat on the session , and at 1.2891 against the Swiss franc , also flat .</code> | <code>The dollar was at 116.78 yen JPY = , virtually flat on the session , and at 1.2871 against the Swiss franc CHF = , down 0.1 percent .</code> | <code>0</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### glue/qqp

* Dataset: [glue/qqp](https://huggingface.co/datasets/glue) at [bcdcba7](https://huggingface.co/datasets/glue/tree/bcdcba79d07bc864c1c254ccfcedcce55bcc9a8c)
* Size: 40,430 evaluation samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                         | label                                           |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                            | string                                                                            | int                                             |
  | details | <ul><li>min: 3 tokens</li><li>mean: 15.77 tokens</li><li>max: 48 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 16.05 tokens</li><li>max: 69 tokens</li></ul> | <ul><li>0: ~62.00%</li><li>1: ~38.00%</li></ul> |
* Samples:
  | sentence1                                                                                        | sentence2                                                                                                          | label          |
  |:-------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|:---------------|
  | <code>What happens to a question on Quora if it is marked as needing further improvement?</code> | <code>If Quora doesn't understand my question and marks it as needing improvement, can others still see it?</code> | <code>1</code> |
  | <code>What does the open blue circle in Facebook Messenger mean?</code>                          | <code>"what does ""delivered"" mean on Facebook messenger?"</code>                                                 | <code>0</code> |
  | <code>How do I cool my mind?</code>                                                              | <code>What is the best way to be cool?</code>                                                                      | <code>0</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### fever-evidence-related

* Dataset: [fever-evidence-related](https://huggingface.co/datasets/mwong/fever-evidence-related) at [14aba00](https://huggingface.co/datasets/mwong/fever-evidence-related/tree/14aba009b5fcd97b1a9ee6f3e3b0da0e308cf7cb)
* Size: 54,578 evaluation samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                             | label                                           |
  |:--------|:----------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------|:------------------------------------------------|
  | type    | string                                                                            | string                                                                                | int                                             |
  | details | <ul><li>min: 7 tokens</li><li>mean: 13.66 tokens</li><li>max: 26 tokens</li></ul> | <ul><li>min: 33 tokens</li><li>mean: 282.39 tokens</li><li>max: 1713 tokens</li></ul> | <ul><li>0: ~28.10%</li><li>1: ~71.90%</li></ul> |
* Samples:
  | sentence1                                                                                                                 | sentence2                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                | label          |
  |:--------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------|
  | <code>Colin Kaepernick became a starting quarterback during the 49ers 63rd season in the National Football League.</code> | <code>RapidAdvance is a technology-powered financial services company that provides working capital to small and mid-sized businesses in the United States .. United States. United States. financial services. financial services. working capital. working capital. small and mid-sized businesses. Small and medium-sized enterprises. It offers small business loan programs for business owners in a variety of industries , including traditional retail establishments , brand name chain restaurants , automotive repair , manufacturing , trucking , and professional service providers .. Founded in 2005 and headquartered in Bethesda , Maryland , the company was acquired by Dan Gilbert 's Rockbridge Growth Equity , LLC in 2013 .. It is part of Rock Ventures `` family '' of companies that include the Cleveland Cavaliers , Fathead , Quicken Loans and Genius .. Rock Ventures. Rock Ventures. Cleveland Cavaliers. Cleveland Cavaliers. Fathead. Fathead ( brand ). Quicken Loans. Quicken Loans. Genius. Genius</code>           | <code>1</code> |
  | <code>Colin Kaepernick became a starting quarterback during the 49ers 63rd season in the National Football League.</code> | <code>Arthur Herbert Copeland -LRB- June 22 , 1898 Rochester , New York -- July 6 , 1970 -RRB- was an American mathematician .. American. United States. He graduated from Harvard University in 1926 and taught at Rice University and the University of Michigan .. Rice University. Rice University. University of Michigan. University of Michigan. Harvard University. Harvard University. His main interest was in the foundations of probability .. probability. probability theory. He worked with Paul Erdos on the Copeland-Erdos constant .. Copeland-Erdos constant. Copeland-Erdos constant. Paul Erdos. Paul Erdos. His son , Arthur Herbert Copeland , Jr. , is also a mathematician .</code>                                                                                                                                                                                                                                                                                                                                             | <code>1</code> |
  | <code>Tilda Swinton is a vegan.</code>                                                                                    | <code>Michael Ronald Taylor -LRB- 1 June 1938 , Ealing , West London - 19 January 1969 -RRB- was a British jazz composer , pianist and co-songwriter for the band Cream .. Ealing. Ealing. London. London. British. United Kingdom. Cream. Cream ( band ). Mike Taylor was brought up by his grandparents in London and Kent , and joined the RAF for his national service .. London. London. Having rehearsed and written extensively throughout the early 1960s , he recorded two albums for the Lansdowne series produced by Denis Preston  : Pendulum -LRB- 1966 -RRB- with drummer Jon Hiseman , bassist Tony Reeves and saxophonist Dave Tomlin -RRB- and Trio -LRB- 1967 -RRB- with Hiseman and bassists Jack Bruce and Ron Rubin .. Denis Preston. Denis Preston. Jon Hiseman. Jon Hiseman. Dave Tomlin. Dave Tomlin ( musician ). Jack Bruce. Jack Bruce. They were issued on UK Columbia .. Columbia. Columbia Graphophone Company. During his brief recording career , several of Taylor 's pieces were played and recorded by his ...</code> | <code>1</code> |
* Loss: [<code>SoftmaxLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#softmaxloss)

#### glue/stsb

* Dataset: [glue/stsb](https://huggingface.co/datasets/glue) at [bcdcba7](https://huggingface.co/datasets/glue/tree/bcdcba79d07bc864c1c254ccfcedcce55bcc9a8c)
* Size: 1,500 evaluation samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                         | label                                                          |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------|
  | type    | string                                                                            | string                                                                            | float                                                          |
  | details | <ul><li>min: 5 tokens</li><li>mean: 16.46 tokens</li><li>max: 44 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 16.47 tokens</li><li>max: 44 tokens</li></ul> | <ul><li>min: 0.0</li><li>mean: 2.35</li><li>max: 5.0</li></ul> |
* Samples:
  | sentence1                                                                                                                      | sentence2                                                                                                                                 | label                           |
  |:-------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------|
  | <code>The room used for defecation is almost always referred to by euphemism.</code>                                           | <code>I'm English, and would probably use 'toilet' most of the time, and always in the context of a private home.</code>                  | <code>1.600000023841858</code>  |
  | <code>The two-year note US2YT=RR fell 5/32 in price, taking its yield to 1.23 percent from 1.16 percent late on Monday.</code> | <code>The benchmark 10-year note US10YT=RR lost 11/32 in price, taking its yield to 3.21 percent from 3.17 percent late on Monday.</code> | <code>2.0</code>                |
  | <code>I use Elinchrom Skyports, but if money is not an issue then go for PocketWizards.</code>                                 | <code>Or just go with the ultra-cheap YongNuo RF-602, which give you a lot of bang for the buck.</code>                                   | <code>1.2000000476837158</code> |
* Loss: [<code>CosineSimilarityLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosinesimilarityloss) with these parameters:
  ```json
  {
      "loss_fct": "torch.nn.modules.loss.MSELoss"
  }
  ```

#### sick/relatedness

* Dataset: sick/relatedness
* Size: 495 evaluation samples
* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
* Approximate statistics based on the first 495 samples:
  |         | sentence1                                                                         | sentence2                                                                         | label                                                          |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------|
  | type    | string                                                                            | string                                                                            | float                                                          |
  | details | <ul><li>min: 6 tokens</li><li>mean: 12.69 tokens</li><li>max: 32 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 12.15 tokens</li><li>max: 30 tokens</li></ul> | <ul><li>min: 1.0</li><li>mean: 3.59</li><li>max: 5.0</li></ul> |
* Samples:
  | sentence1                                                                      | sentence2                                                                 | label                           |
  |:-------------------------------------------------------------------------------|:--------------------------------------------------------------------------|:--------------------------------|
  | <code>The young boys are playing outdoors and the man is smiling nearby</code> | <code>There is no boy playing outdoors and there is no man smiling</code> | <code>3.5999999046325684</code> |
  | <code>A person in a black jacket is doing tricks on a motorbike</code>         | <code>A skilled person is riding a bicycle on one wheel</code>            | <code>3.4000000953674316</code> |
  | <code>Four children are doing backbends in the gym</code>                      | <code>Four girls are doing backbends and playing outdoors</code>          | <code>3.799999952316284</code>  |
* Loss: [<code>CosineSimilarityLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosinesimilarityloss) with these parameters:
  ```json
  {
      "loss_fct": "torch.nn.modules.loss.MSELoss"
  }
  ```

#### sts-companion

* Dataset: [sts-companion](https://huggingface.co/datasets/tasksource/sts-companion) at [fd8beff](https://huggingface.co/datasets/tasksource/sts-companion/tree/fd8beffb788df5f6673bc688e6dcbe3690a3acc6)
* Size: 5,289 evaluation samples
* Columns: <code>label</code>, <code>sentence1</code>, and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | label                                                          | sentence1                                                                         | sentence2                                                                         |
  |:--------|:---------------------------------------------------------------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
  | type    | float                                                          | string                                                                            | string                                                                            |
  | details | <ul><li>min: 0.0</li><li>mean: 3.24</li><li>max: 5.0</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 19.56 tokens</li><li>max: 81 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 17.21 tokens</li><li>max: 72 tokens</li></ul> |
* Samples:
  | label            | sentence1                                                                                                                                                                                                                                  | sentence2                                                                                                                                                                                  |
  |:-----------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>3.8</code> | <code>After all, it is by no means certain that the proposed definition of equitable price is better than any other, because the various definitions that are currently in use in the Member States are all perfectly satisfactory.</code> | <code>In fact, it is not absolutely certain that the definition of price that is proposed is better than another, because the different currently in the Member States all fully.  </code> |
  | <code>2.0</code> | <code>rslw: no, why would i hate them?</code>                                                                                                                                                                                              | <code>why do you hate america so much?</code>                                                                                                                                              |
  | <code>3.0</code> | <code>Families of #Newtown Victims Look for Answers on #Gun Violence  #NRA</code>                                                                                                                                                          | <code>Families of Newtown School Massacre Victims Organize Against Gun Violence</code>                                                                                                     |
* Loss: [<code>CosineSimilarityLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosinesimilarityloss) with these parameters:
  ```json
  {
      "loss_fct": "torch.nn.modules.loss.MSELoss"
  }
  ```
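
Each dataset above is paired with its own loss. In Sentence Transformers 3.x this is expressed by passing dictionaries keyed by dataset name to the trainer; a sketch with two of the datasets (the checkpoint is a placeholder, and the column handling is an assumption):

```python
from datasets import load_dataset
from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer, losses

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")  # placeholder checkpoint

train_dataset = {
    "merged-2l-nli": load_dataset("tasksource/merged-2l-nli", split="train"),
    # Drop the extra "idx" column so only (sentence1, sentence2, label) remain.
    "glue-stsb": load_dataset("glue", "stsb", split="train").remove_columns("idx"),
}
loss = {
    "merged-2l-nli": losses.SoftmaxLoss(
        model,
        sentence_embedding_dimension=model.get_sentence_embedding_dimension(),
        num_labels=2,
    ),
    "glue-stsb": losses.CosineSimilarityLoss(model),
}

trainer = SentenceTransformerTrainer(model=model, train_dataset=train_dataset, loss=loss)
trainer.train()
```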

### Training Hyperparameters
#### Non-Default Hyperparameters

- `per_device_train_batch_size`: 24
- `learning_rate`: 2e-05
- `weight_decay`: 1e-06
- `num_train_epochs`: 1
- `warmup_ratio`: 0.1
- `fp16`: True
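
These map one-to-one onto `SentenceTransformerTrainingArguments` (a subclass of `transformers.TrainingArguments`). A sketch reproducing just the non-default values, with a placeholder `output_dir`:

```python
from sentence_transformers import SentenceTransformerTrainingArguments

args = SentenceTransformerTrainingArguments(
    output_dir="outputs",            # placeholder
    per_device_train_batch_size=24,
    learning_rate=2e-5,
    weight_decay=1e-6,
    num_train_epochs=1,
    warmup_ratio=0.1,                # linear warmup over the first 10% of steps
    fp16=True,                       # mixed-precision training
)
```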

#### All Hyperparameters
<details><summary>Click to expand</summary>

- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: no
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 24
- `per_device_eval_batch_size`: 8
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 1
- `eval_accumulation_steps`: None
- `torch_empty_cache_steps`: None
- `learning_rate`: 2e-05
- `weight_decay`: 1e-06
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1.0
- `num_train_epochs`: 1
- `max_steps`: -1
- `lr_scheduler_type`: linear
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.1
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: True
- `logging_nan_inf_filter`: True
- `save_safetensors`: True
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 42
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: False
- `fp16`: True
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: None
- `local_rank`: 0
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: False
- `dataloader_num_workers`: 0
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: False
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: None
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: False
- `resume_from_checkpoint`: None
- `hub_model_id`: None
- `hub_strategy`: every_save
- `hub_private_repo`: None
- `hub_always_push`: False
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `include_for_metrics`: []
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`: 
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `dispatch_batches`: None
- `split_batches`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `eval_on_start`: False
- `use_liger_kernel`: False
- `eval_use_gather_object`: False
- `average_tokens_across_devices`: False
- `prompts`: None
- `batch_sampler`: batch_sampler
- `multi_dataset_batch_sampler`: proportional

</details>
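
Among the defaults above, `multi_dataset_batch_sampler: proportional` means every batch is drawn from a single dataset, and datasets are sampled in proportion to their size, so larger datasets contribute more batches per epoch. A toy illustration of the sampling step (not the library's actual implementation):

```python
import random

# Example sizes taken from the evaluation datasets above.
sizes = {"merged-2l-nli": 4053, "glue/qqp": 40430, "glue/stsb": 1500}
names, weights = zip(*sizes.items())
# Pick which dataset the next batch comes from, weighted by dataset size.
next_batch_source = random.choices(names, weights=weights, k=1)[0]
```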

### Training Logs
| Epoch  | Step | Training Loss |
|:------:|:----:|:-------------:|
| 0.0067 | 500  | 10.6192       |
| 0.0134 | 1000 | 1.9196        |
| 0.0202 | 1500 | 1.0304        |
| 0.0269 | 2000 | 0.9269        |
| 0.0336 | 2500 | 0.7738        |
| 0.0403 | 3000 | 0.7092        |
| 0.0471 | 3500 | 0.6571        |
| 0.0538 | 4000 | 0.6408        |
| 0.0605 | 4500 | 0.6348        |
| 0.0672 | 5000 | 0.5927        |
| 0.0739 | 5500 | 0.5848        |
| 0.0807 | 6000 | 0.5542        |
| 0.0874 | 6500 | 0.5580       |
| 0.0941 | 7000 | 0.5394        |
| 0.1008 | 7500 | 0.5632        |
| 0.1076 | 8000 | 0.5037        |
| 0.1143 | 8500 | 0.5278        |


### Framework Versions
- Python: 3.11.4
- Sentence Transformers: 3.3.1
- Transformers: 4.48.0.dev0
- PyTorch: 2.4.0+cu121
- Accelerate: 1.0.1
- Datasets: 2.20.0
- Tokenizers: 0.21.0

## Citation

### BibTeX

#### Sentence Transformers and SoftmaxLoss
```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```

#### MultipleNegativesRankingLoss
```bibtex
@misc{henderson2017efficient,
    title={Efficient Natural Language Response Suggestion for Smart Reply},
    author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil},
    year={2017},
    eprint={1705.00652},
    archivePrefix={arXiv},
    primaryClass={cs.CL}
}
```

#### CachedMultipleNegativesRankingLoss
```bibtex
@misc{gao2021scaling,
    title={Scaling Deep Contrastive Learning Batch Size under Memory Limited Setup},
    author={Luyu Gao and Yunyi Zhang and Jiawei Han and Jamie Callan},
    year={2021},
    eprint={2101.06983},
    archivePrefix={arXiv},
    primaryClass={cs.LG}
}
```
