[ { "model": "sabia-2-small", "name": "Sabiá-2 Small", "link": "https://www.maritaca.ai/", "date": "2024-04-12", "status": "full", "main_language": "Portuguese", "result_metrics": { "enem_challenge": 0.7172848145556333, "bluex": 0.5549374130737135, "oab_exams": 0.6364464692482916, "assin2_sts": 0.7053302344881672, "assin2_rte": 0.9121728362223306, "faquad_nli": 0.7575848453041435, "hatebr_offensive": 0.5025338637870607, "portuguese_hate_speech": 0.4650217578860529, "tweetsentbr": 0.533977453070735 }, "result_metrics_average": 0.6428099652929031, "result_metrics_npm": 0.43960062672137007 }, { "model": "sabia-2-medium", "name": "Sabiá-2 Medium", "link": "https://www.maritaca.ai/", "date": "2024-04-13", "status": "full", "main_language": "Portuguese", "result_metrics": { "enem_challenge": 0.8180545836249126, "bluex": 0.717663421418637, "oab_exams": 0.7321184510250569, "assin2_sts": 0.7804108376537757, "assin2_rte": 0.923459363368553, "faquad_nli": 0.7657657657657658, "hatebr_offensive": 0.8349989882997386, "portuguese_hate_speech": 0.7379326358571694, "tweetsentbr": 0.7269533040381798 }, "result_metrics_average": 0.7819285945613098, "result_metrics_npm": 0.6676121786922709 }, { "model": "gpt-3.5-turbo-0125", "name": "GPT-3.5 Turbo (0125)", "link": "https://www.openai.com/", "date": "2024-03-08", "status": "full", "main_language": "English", "result_metrics": { "enem_challenge": 0.7214835549335199, "bluex": 0.6244784422809457, "oab_exams": 0.5430523917995445, "assin2_sts": 0.7378460201077941, "assin2_rte": 0.8823038414050672, "faquad_nli": 0.746353108609074, "hatebr_offensive": 0.8056205941193919, "portuguese_hate_speech": 0.7363692688971499, "tweetsentbr": 0.7028981330613626 }, "result_metrics_average": 0.7222672616904278, "result_metrics_npm": 0.5841504766165372 }, { "model": "claude-3-haiku-20240307", "name": "Claude-3 Haiku (20240307)", "link": "https://www.claude.ai/", "date": "2024-04-13", "status": "full", "main_language": "English", "result_metrics": { 
"enem_challenge": 0.7718684394681595, "bluex": 0.6662030598052852, "oab_exams": 0.626879271070615, "assin2_sts": 0.7892124744168747, "assin2_rte": 0.9184462138121732, "faquad_nli": 0.6340996599941455, "hatebr_offensive": 0.8023698759439051, "portuguese_hate_speech": 0.7342166269560177, "tweetsentbr": 0.5477486799750156 }, "result_metrics_average": 0.7212271446046878, "result_metrics_npm": 0.5735261536314672 }, { "model": "gemini-1.0-pro", "name": "Gemini 1.0 Pro", "link": "https://ai.google.dev/", "date": "2024-03-08", "status": "full", "main_language": "English", "result_metrics": { "enem_challenge": 0.7130860741777467, "bluex": 0.5869262865090403, "oab_exams": 0.4988610478359909, "assin2_sts": 0.7058831239763663, "assin2_rte": 0.8945993304651698, "faquad_nli": 0.7070913567220611, "hatebr_offensive": 0.8086330094493972, "portuguese_hate_speech": 0.699119105113102, "tweetsentbr": 0.6803240476660983 }, "result_metrics_average": 0.6993914868794414, "result_metrics_npm": 0.551208000273598 } ]