{"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.57,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13} | |
{"index":11,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":59.36,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2} | |
{"index":15,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":58.92,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91} | |
{"index":4,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/GritLM\/GritLM-7B\">GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":58.03,"GeoreviewClassification (rus-Cyrl)":45.72,"HeadlineClassification (rus-Cyrl)":78.05,"InappropriatenessClassification (rus-Cyrl)":60.11,"KinopoiskClassification (rus-Cyrl)":56.14,"RuReviewsClassification (rus-Cyrl)":61.42,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.93,"RuSciBenchOECDClassification (rus-Cyrl)":45.83} | |
{"index":10,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":57.86,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28} | |
{"index":0,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":57.43,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57} | |
{"index":23,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":56.55,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58} | |
{"index":14,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.19,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69} | |
{"index":5,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.44,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8} | |
{"index":6,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.21,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04} | |
{"index":16,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.09,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72} | |
{"index":12,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":54.23,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34} | |
{"index":22,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.11,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14} | |
{"index":24,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":53.46,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79} | |
{"index":7,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":52.73,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36} | |
{"index":17,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":52.35,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48} | |
{"index":1,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":52.16,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65} | |
{"index":3,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.49,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11} | |
{"index":21,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.38,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41} | |
{"index":9,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.37,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48} | |
{"index":2,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":50.66,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13} | |
{"index":8,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":42.68,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51} | |
{"index":20,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.53,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62} | |
{"index":19,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.67,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3} | |
{"index":18,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.33,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31} | |