MMFMChallenge / submission_info / b7d5efa8-2e0d-49a3-ab2d-223a050c9536.json
{
"id": "b7d5efa8-2e0d-49a3-ab2d-223a050c9536",
"submissions": [
{
"datetime": "2024-06-07 22:21:22",
"submission_id": "d0bc93fa-47e5-4f78-9852-d43dd4c66768",
"submission_comment": "",
"submission_repo": "",
"space_id": "",
"submitted_by": "658da65dc6e5876f63dd5cf8",
"status": 3,
"selected": false,
"public_score": {
"iconqa_fill_acc": 0.45,
"funsd_acc": 0.8,
"iconqa_choose_acc": 0.505,
"wildreceipt_acc": 0.895,
"textbookqa_acc": 0.615,
"tabfact_acc": 0.595,
"docvqa_acc": 0.275,
"infographicvqa_acc": 0.295,
"websrc_acc": 0.44,
"wtq_acc": 0.2,
"phase1_overall_acc": 0.507,
"mydoc_acc": 0.0825,
"mychart_acc": 0.025,
"myinfographic_acc": 0.30841,
"phase2_overall_acc": 0.16537
},
"private_score": {
"iconqa_fill_acc": -1,
"funsd_acc": -1,
"iconqa_choose_acc": -1,
"wildreceipt_acc": -1,
"textbookqa_acc": -1,
"tabfact_acc": -1,
"docvqa_acc": -1,
"infographicvqa_acc": -1,
"websrc_acc": -1,
"wtq_acc": -1,
"phase1_overall_acc": -1,
"mydoc_acc": -1,
"mychart_acc": -1,
"myinfographic_acc": -1,
"phase2_overall_acc": -1
}
}
]
}
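
For reference, a minimal sketch of reading this submission record with Python's standard library. The file path and field names are taken from the JSON above; the local path is an assumption (adjust to wherever the file was downloaded), and a value of -1 in private_score is assumed to mean the private score has not been released.

```python
import json

# Assumed local path to the downloaded record; not part of the original file.
path = "submission_info/b7d5efa8-2e0d-49a3-ab2d-223a050c9536.json"

with open(path, encoding="utf-8") as f:
    record = json.load(f)

# A record holds a list of submissions; this file contains one entry.
for sub in record["submissions"]:
    print(sub["submission_id"], sub["datetime"], "status:", sub["status"])

    # Per-task public accuracies plus the two phase-level aggregates.
    public = sub["public_score"]
    print("phase1 overall acc:", public["phase1_overall_acc"])
    print("phase2 overall acc:", public["phase2_overall_acc"])

    # Private scores of -1 are assumed to indicate "not yet released".
    released = {k: v for k, v in sub["private_score"].items() if v != -1}
    print("released private scores:", released or "none")
```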