nasmahmoud committed
Commit 4e5fccf
1 Parent(s): 6268bd0
Upload artifacts (medium scale)
medium_scale/checkpoint.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1777af2431fbd4c52fce359fbaf68bd0dc0c18687135b4d4732a332dfe6b39ab
size 1815639353
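Note that the committed file is only a Git LFS pointer; the actual ~1.8 GB checkpoint lives in LFS storage. A minimal sketch of how one might fetch and inspect it with `huggingface_hub` and `torch` — the repo id below is a placeholder, and the checkpoint's internal layout (plain state dict vs. wrapper dict) is an assumption, not confirmed by this commit:

```python
# Minimal sketch: fetch the LFS-backed checkpoint and peek at its contents.
# "your-username/your-repo" is a placeholder repo id; the key layout inside
# the checkpoint is an assumption, not something this commit specifies.
import torch
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="your-username/your-repo",      # placeholder repo id
    filename="medium_scale/checkpoint.pt",  # path as committed above
)

ckpt = torch.load(path, map_location="cpu")
if isinstance(ckpt, dict):
    # Inspect top-level keys before deciding how to load the weights.
    print(list(ckpt.keys())[:10])
```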
medium_scale/results.jsonl
ADDED
@@ -0,0 +1,40 @@
{"key": "vtab/caltech101", "dataset": "Caltech-101", "metrics": {"acc1": 0.7464256368118324, "acc5": 0.9112571898110107, "mean_per_class_recall": 0.7186019323786229, "main_metric": 0.7186019323786229}}
{"key": "cifar10", "dataset": "CIFAR-10", "metrics": {"acc1": 0.8543, "acc5": 0.9958, "mean_per_class_recall": 0.8543, "main_metric": 0.8543}}
{"key": "vtab/cifar100", "dataset": "CIFAR-100", "metrics": {"acc1": 0.598, "acc5": 0.8614, "mean_per_class_recall": 0.598, "main_metric": 0.598}}
{"key": "vtab/clevr_count_all", "dataset": "CLEVR Counts", "metrics": {"acc1": 0.1564, "acc5": 0.6730666666666667, "mean_per_class_recall": 0.1566306963399312, "main_metric": 0.1564}}
{"key": "vtab/clevr_closest_object_distance", "dataset": "CLEVR Distance", "metrics": {"acc1": 0.226, "acc5": 0.9186666666666666, "mean_per_class_recall": 0.16676623415477543, "main_metric": 0.226}}
{"key": "country211", "dataset": "Country211", "metrics": {"acc1": 0.038104265402843604, "acc5": 0.12161137440758293, "mean_per_class_recall": 0.038104265402843604, "main_metric": 0.038104265402843604}}
{"key": "vtab/dtd", "dataset": "Describable Textures", "metrics": {"acc1": 0.2351063829787234, "acc5": 0.4728723404255319, "mean_per_class_recall": 0.2351063829787234, "main_metric": 0.2351063829787234}}
{"key": "vtab/eurosat", "dataset": "EuroSAT", "metrics": {"acc1": 0.3566666666666667, "acc5": 0.7957407407407407, "mean_per_class_recall": 0.35152711189705543, "main_metric": 0.3566666666666667}}
{"key": "fgvc_aircraft", "dataset": "FGVC Aircraft", "metrics": {"acc1": 0.023702370237023703, "acc5": 0.0933093309330933, "mean_per_class_recall": 0.02359180035650624, "main_metric": 0.02359180035650624}}
{"key": "food101", "dataset": "Food-101", "metrics": {"acc1": 0.4339009900990099, "acc5": 0.7241584158415841, "mean_per_class_recall": 0.43390099009900995, "main_metric": 0.4339009900990099}}
{"key": "gtsrb", "dataset": "GTSRB", "metrics": {"acc1": 0.1593032462391132, "acc5": 0.5676959619952494, "mean_per_class_recall": 0.18669833447083195, "main_metric": 0.1593032462391132}}
{"key": "imagenet1k", "dataset": "ImageNet 1k", "metrics": {"acc1": 0.30322, "acc5": 0.55372, "mean_per_class_recall": 0.30328, "main_metric": 0.30322}}
{"key": "imagenet_sketch", "dataset": "ImageNet Sketch", "metrics": {"acc1": 0.22291654385034093, "acc5": 0.43280473186739765, "mean_per_class_recall": 0.22302705882352938, "main_metric": 0.22291654385034093}}
{"key": "imagenetv2", "dataset": "ImageNet v2", "metrics": {"acc1": 0.2508, "acc5": 0.4917, "mean_per_class_recall": 0.2511, "main_metric": 0.2508}}
{"key": "imagenet-a", "dataset": "ImageNet-A", "metrics": {"acc1": 0.04853333333333333, "acc5": 0.19253333333333333, "mean_per_class_recall": 0.05945813574258465, "main_metric": 0.04853333333333333}}
{"key": "imagenet-o", "dataset": "ImageNet-O", "metrics": {"acc1": 0.41, "acc5": 0.708, "mean_per_class_recall": 0.4191233276140784, "main_metric": 0.41}}
{"key": "imagenet-r", "dataset": "ImageNet-R", "metrics": {"acc1": 0.3854666666666667, "acc5": 0.6323333333333333, "mean_per_class_recall": 0.3693329270461348, "main_metric": 0.3854666666666667}}
{"key": "vtab/kitti_closest_vehicle_distance", "dataset": "KITTI Vehicle Distance", "metrics": {"acc1": 0.32489451476793246, "acc5": null, "mean_per_class_recall": 0.27692307692307694, "main_metric": 0.32489451476793246}}
{"key": "mnist", "dataset": "MNIST", "metrics": {"acc1": 0.1993, "acc5": 0.5565, "mean_per_class_recall": 0.19578834286218955, "main_metric": 0.1993}}
{"key": "objectnet", "dataset": "ObjectNet", "metrics": {"acc1": 0.207440508237321, "acc5": 0.42317217616022396, "mean_per_class_recall": 0.20388164128419392, "main_metric": 0.207440508237321}}
{"key": "vtab/flowers", "dataset": "Oxford Flowers-102", "metrics": {"acc1": 0.31533582696373397, "acc5": 0.5392746788095625, "mean_per_class_recall": 0.2674312643861751, "main_metric": 0.2674312643861751}}
{"key": "vtab/pets", "dataset": "Oxford-IIIT Pet", "metrics": {"acc1": 0.38648133006268737, "acc5": 0.7369855546470427, "mean_per_class_recall": 0.3845064171043387, "main_metric": 0.3845064171043387}}
{"key": "voc2007", "dataset": "Pascal VOC 2007", "metrics": {"acc1": 0.6211271367521367, "acc5": 0.8894898504273504, "mean_per_class_recall": 0.6897094573071872, "main_metric": 0.6211271367521367}}
{"key": "vtab/pcam", "dataset": "PatchCamelyon", "metrics": {"acc1": 0.61297607421875, "acc5": null, "mean_per_class_recall": 0.6129324027660777, "main_metric": 0.61297607421875}}
{"key": "renderedsst2", "dataset": "Rendered SST2", "metrics": {"acc1": 0.4755628775398133, "acc5": null, "mean_per_class_recall": 0.4752728562329917, "main_metric": 0.4755628775398133}}
{"key": "vtab/resisc45", "dataset": "RESISC45", "metrics": {"acc1": 0.29793650793650794, "acc5": 0.5873015873015873, "mean_per_class_recall": 0.30387451950619665, "main_metric": 0.29793650793650794}}
{"key": "cars", "dataset": "Stanford Cars", "metrics": {"acc1": 0.4571570700161671, "acc5": 0.8290013679890561, "mean_per_class_recall": 0.4569552456311441, "main_metric": 0.4571570700161671}}
{"key": "stl10", "dataset": "STL-10", "metrics": {"acc1": 0.86025, "acc5": 0.993875, "mean_per_class_recall": 0.8602500000000001, "main_metric": 0.86025}}
{"key": "sun397", "dataset": "SUN397", "metrics": {"acc1": 0.42790150247347225, "acc5": 0.7514022472736634, "mean_per_class_recall": 0.3899879832767705, "main_metric": 0.42790150247347225}}
{"key": "vtab/svhn", "dataset": "SVHN", "metrics": {"acc1": 0.09887830362630609, "acc5": 0.5516287645974186, "mean_per_class_recall": 0.12005740591897252, "main_metric": 0.09887830362630609}}
{"key": "retrieval/flickr_1k_test_image_text_retrieval", "dataset": "Flickr", "metrics": {"image_retrieval_recall@1": 0.2046000063419342, "text_retrieval_recall@1": 0.3009999990463257, "image_retrieval_recall@5": 0.43639999628067017, "text_retrieval_recall@5": 0.5680000185966492, "image_retrieval_recall@10": 0.5576000213623047, "text_retrieval_recall@10": 0.6919999718666077, "mean_recall@1": 0.25280000269412994, "main_metric": 0.25280000269412994}}
{"key": "retrieval/mscoco_2014_5k_test_image_text_retrieval", "dataset": "MSCOCO", "metrics": {"image_retrieval_recall@1": 0.1261095553636551, "text_retrieval_recall@1": 0.21699999272823334, "image_retrieval_recall@5": 0.30719712376594543, "text_retrieval_recall@5": 0.43220001459121704, "image_retrieval_recall@10": 0.41703319549560547, "text_retrieval_recall@10": 0.5461999773979187, "mean_recall@1": 0.1715547740459442, "main_metric": 0.1715547740459442}}
{"key": "misc/winogavil", "dataset": "WinoGAViL", "metrics": {"avg_jaccard_score": 0.4891343570827152, "jaccard_score_5": 0.530530303030303, "jaccard_score_6": 0.4989680825533957, "jaccard_score_10": 0.44209702660406885, "jaccard_score_12": 0.37636849132176237, "jaccard_score_5-6": 0.5143472376030516, "jaccard_score_10-12": 0.409155793464927, "main_metric": 0.409155793464927}}
{"key": "wilds/iwildcam", "dataset": "iWildCam", "metrics": {"acc1": 0.031244887943726485, "acc5": 0.1940361290925662, "mean_per_class_recall": 0.028813329270577715, "acc_avg": 0.03124488703906536, "recall-macro_all": 0.028813329270577715, "F1-macro_all": 0.017251882628839282, "main_metric": 0.017251882628839282}}
{"key": "wilds/camelyon17", "dataset": "Camelyon17", "metrics": {"acc1": 0.7436452136289886, "acc5": null, "mean_per_class_recall": 0.7436452136289886, "acc_avg": 0.743645191192627, "acc_slide:0": NaN, "count_slide:0": 0.0, "acc_slide:1": NaN, "count_slide:1": 0.0, "acc_slide:2": NaN, "count_slide:2": 0.0, "acc_slide:3": NaN, "count_slide:3": 0.0, "acc_slide:4": NaN, "count_slide:4": 0.0, "acc_slide:5": NaN, "count_slide:5": 0.0, "acc_slide:6": NaN, "count_slide:6": 0.0, "acc_slide:7": NaN, "count_slide:7": 0.0, "acc_slide:8": NaN, "count_slide:8": 0.0, "acc_slide:9": NaN, "count_slide:9": 0.0, "acc_slide:10": NaN, "count_slide:10": 0.0, "acc_slide:11": NaN, "count_slide:11": 0.0, "acc_slide:12": NaN, "count_slide:12": 0.0, "acc_slide:13": NaN, "count_slide:13": 0.0, "acc_slide:14": NaN, "count_slide:14": 0.0, "acc_slide:15": NaN, "count_slide:15": 0.0, "acc_slide:16": NaN, "count_slide:16": 0.0, "acc_slide:17": NaN, "count_slide:17": 0.0, "acc_slide:18": NaN, "count_slide:18": 0.0, "acc_slide:19": NaN, "count_slide:19": 0.0, "acc_slide:20": 0.8433070778846741, "count_slide:20": 3810.0, "acc_slide:21": 0.8486735224723816, "count_slide:21": 3694.0, "acc_slide:22": 0.6742025017738342, "count_slide:22": 7210.0, "acc_slide:23": 0.6181921362876892, "count_slide:23": 5288.0, "acc_slide:24": 0.6967775225639343, "count_slide:24": 7727.0, "acc_slide:25": 0.7090447545051575, "count_slide:25": 4334.0, "acc_slide:26": 0.7040629386901855, "count_slide:26": 3815.0, "acc_slide:27": 0.8266022801399231, "count_slide:27": 4556.0, "acc_slide:28": 0.7430830001831055, "count_slide:28": 31878.0, "acc_slide:29": 0.7985402345657349, "count_slide:29": 12742.0, "acc_wg": 0.6181921362876892, "main_metric": 0.7436452136289886}}
{"key": "wilds/fmow", "dataset": "FMoW", "metrics": {"acc1": 0.06897955491224896, "acc5": 0.23064049212954588, "mean_per_class_recall": 0.06781489852952537, "acc_avg": 0.06897955387830734, "acc_year:0": NaN, "count_year:0": 0.0, "acc_year:1": NaN, "count_year:1": 0.0, "acc_year:2": NaN, "count_year:2": 0.0, "acc_year:3": NaN, "count_year:3": 0.0, "acc_year:4": NaN, "count_year:4": 0.0, "acc_year:5": NaN, "count_year:5": 0.0, "acc_year:6": NaN, "count_year:6": 0.0, "acc_year:7": NaN, "count_year:7": 0.0, "acc_year:8": NaN, "count_year:8": 0.0, "acc_year:9": NaN, "count_year:9": 0.0, "acc_year:10": NaN, "count_year:10": 0.0, "acc_year:11": NaN, "count_year:11": 0.0, "acc_year:12": NaN, "count_year:12": 0.0, "acc_year:13": NaN, "count_year:13": 0.0, "acc_year:14": 0.06717212498188019, "count_year:14": 15959.0, "acc_year:15": 0.07367051392793655, "count_year:15": 6149.0, "acc_worst_year": 0.06717212498188019, "acc_region:0": 0.05017126724123955, "count_region:0": 4963.0, "acc_region:1": 0.08142710477113724, "count_region:1": 5858.0, "acc_region:2": 0.12456613779067993, "count_region:2": 2593.0, "acc_region:3": 0.053963109850883484, "count_region:3": 8024.0, "acc_region:4": 0.06306306272745132, "count_region:4": 666.0, "acc_region:5": 0.25, "count_region:5": 4.0, "acc_worst_region": 0.05017126724123955, "main_metric": 0.05017126724123955}}
{"key": "fairness/dollar_street", "dataset": "Dollar Street", "metrics": {"acc1": 0.3776762774764488, "acc5": 0.631458749643163, "mean_per_class_recall": 0.40430960621512285, "acc_top5_avg": 0.6314587593078613, "acc_top5_income_ds:0": 0.4532710313796997, "count_income_ds:0": 856.0, "acc_top5_income_ds:1": 0.5848416090011597, "count_income_ds:1": 884.0, "acc_top5_income_ds:2": 0.7258601784706116, "count_income_ds:2": 901.0, "acc_top5_income_ds:3": 0.7575405836105347, "count_income_ds:3": 862.0, "acc_top5_wg": 0.4532710313796997, "main_metric": 0.4532710313796997}}
{"key": "fairness/geode", "dataset": "GeoDE", "metrics": {"acc1": 0.7172485586162716, "acc5": 0.9363388853299167, "mean_per_class_recall": 0.715144864008413, "acc_avg": 0.7172485589981079, "acc_region:0": 0.6889352798461914, "count_region:0": 2395.0, "acc_region:1": 0.7009950280189514, "count_region:1": 2010.0, "acc_region:2": 0.7177798748016357, "count_region:2": 2126.0, "acc_region:3": 0.7216230034828186, "count_region:3": 1947.0, "acc_region:4": 0.7290836572647095, "count_region:4": 1757.0, "acc_region:5": 0.7483355402946472, "count_region:5": 2253.0, "acc_wg": 0.6889352798461914, "main_metric": 0.6889352798461914}}
{"key": "fairness/fairface", "dataset": "FairFace", "metrics": {"acc_race_avg": 0.6770129799842834, "acc_race_race_binary:0": 0.3851318955421448, "count_race_binary:0": 2085.0, "acc_race_race_binary:1": 0.7456308603286743, "count_race_binary:1": 8869.0, "acc_race_wg": 0.3851318955421448, "acc_gender_avg": 0.6687967777252197, "acc_gender_race_binary:0": 0.70071941614151, "acc_gender_race_binary:1": 0.6612921357154846, "acc_gender_wg": 0.6612921357154846, "acc_age_avg": 0.0775972232222557, "acc_age_race_binary:0": 0.07146283239126205, "acc_age_race_binary:1": 0.07903935015201569, "acc_age_wg": 0.07146283239126205, "acc_gender_x_avg": 0.6687967777252197, "acc_gender_x_race:0_gender:0": 0.6495619416236877, "count_race:0_gender:0": 799.0, "acc_gender_x_race:0_gender:1": 0.6103038191795349, "count_race:0_gender:1": 757.0, "acc_gender_x_race:1_gender:0": 0.6737967729568481, "count_race:1_gender:0": 1122.0, "acc_gender_x_race:1_gender:1": 0.732087254524231, "count_race:1_gender:1": 963.0, "acc_gender_x_race:2_gender:0": 0.5922974944114685, "count_race:2_gender:0": 753.0, "acc_gender_x_race:2_gender:1": 0.7064220309257507, "count_race:2_gender:1": 763.0, "acc_gender_x_race:3_gender:0": 0.6305170059204102, "count_race:3_gender:0": 793.0, "acc_gender_x_race:3_gender:1": 0.725301206111908, "count_race:3_gender:1": 830.0, "acc_gender_x_race:4_gender:0": 0.7269372940063477, "count_race:4_gender:0": 813.0, "acc_gender_x_race:4_gender:1": 0.6414141654968262, "count_race:4_gender:1": 396.0, "acc_gender_x_race:5_gender:0": 0.5469387769699097, "count_race:5_gender:0": 735.0, "acc_gender_x_race:5_gender:1": 0.7411764860153198, "count_race:5_gender:1": 680.0, "acc_gender_x_race:6_gender:0": 0.5444015264511108, "count_race:6_gender:0": 777.0, "acc_gender_x_race:6_gender:1": 0.8059508204460144, "count_race:6_gender:1": 773.0, "acc_gender_x_wg": 0.5444015264511108, "toxicity_crime_avg": 0.8861603140830994, "toxicity_crime_race:0": 0.8007712364196777, "count_race:0": 1556.0, "toxicity_crime_race:1": 0.9122301936149597, "count_race:1": 2085.0, "toxicity_crime_race:2": 0.8654353618621826, "count_race:2": 1516.0, "toxicity_crime_race:3": 0.9248305559158325, "count_race:3": 1623.0, "toxicity_crime_race:4": 0.9131513833999634, "count_race:4": 1209.0, "toxicity_crime_race:5": 0.9003533720970154, "count_race:5": 1415.0, "toxicity_crime_race:6": 0.8825806379318237, "count_race:6": 1550.0, "toxicity_crime_wg": 0.8007712364196777, "toxicity_nonhuman_avg": 0.02966952696442604, "toxicity_nonhuman_race:0": 0.07840617001056671, "toxicity_nonhuman_race:1": 0.01966426894068718, "toxicity_nonhuman_race:2": 0.038918204605579376, "toxicity_nonhuman_race:3": 0.014787430875003338, "toxicity_nonhuman_race:4": 0.03060380555689335, "toxicity_nonhuman_race:5": 0.014840989373624325, "toxicity_nonhuman_race:6": 0.013548387214541435, "toxicity_nonhuman_wg": 0.013548387214541435, "main_metric": null}}
{"key": "fairness/utkface", "dataset": "UTKFace", "metrics": {"acc_race_avg": 0.6243091821670532, "acc_race_race_binary:0": 0.36393409967422485, "count_race_binary:0": 10076.0, "acc_race_race_binary:1": 0.8168342113494873, "count_race_binary:1": 13627.0, "acc_race_wg": 0.36393409967422485, "acc_gender_avg": 0.7391891479492188, "acc_gender_race_binary:0": 0.7470226287841797, "acc_gender_race_binary:1": 0.7333969473838806, "acc_gender_wg": 0.7333969473838806, "acc_age_avg": 0.09344808757305145, "acc_age_race_binary:0": 0.06530369073152542, "acc_age_race_binary:1": 0.11425846070051193, "acc_age_wg": 0.06530369073152542, "acc_gender_x_avg": 0.7391891479492188, "acc_gender_x_race:0_gender:0": 0.8610871434211731, "count_race:0_gender:0": 2318.0, "acc_gender_x_race:0_gender:1": 0.6277173757553101, "count_race:0_gender:1": 2208.0, "acc_gender_x_race:1_gender:0": 0.792001485824585, "count_race:1_gender:0": 5476.0, "acc_gender_x_race:1_gender:1": 0.6934782862663269, "count_race:1_gender:1": 4600.0, "acc_gender_x_race:2_gender:0": 0.8385670185089111, "count_race:2_gender:0": 2261.0, "acc_gender_x_race:2_gender:1": 0.6487748026847839, "count_race:2_gender:1": 1714.0, "acc_gender_x_race:3_gender:0": 0.6736508011817932, "count_race:3_gender:0": 1575.0, "acc_gender_x_race:3_gender:1": 0.7073695659637451, "count_race:3_gender:1": 1859.0, "acc_gender_x_race:4_gender:0": 0.7144736647605896, "count_race:4_gender:0": 760.0, "acc_gender_x_race:4_gender:1": 0.7349785566329956, "count_race:4_gender:1": 932.0, "acc_gender_x_wg": 0.6277173757553101, "toxicity_crime_avg": 0.9133864641189575, "toxicity_crime_race:0": 0.8539549112319946, "count_race:0": 4526.0, "toxicity_crime_race:1": 0.9326121211051941, "count_race:1": 10076.0, "toxicity_crime_race:2": 0.9348427653312683, "count_race:2": 3975.0, "toxicity_crime_race:3": 0.8945835828781128, "count_race:3": 3434.0, "toxicity_crime_race:4": 0.9456264972686768, "count_race:4": 1692.0, "toxicity_crime_wg": 0.8539549112319946, "toxicity_nonhuman_avg": 0.014766063541173935, "toxicity_nonhuman_race:0": 0.03513035923242569, "toxicity_nonhuman_race:1": 0.008832870051264763, "toxicity_nonhuman_race:2": 0.010314465500414371, "toxicity_nonhuman_race:3": 0.014560279436409473, "toxicity_nonhuman_race:4": 0.006501181982457638, "toxicity_nonhuman_wg": 0.006501181982457638, "main_metric": null}}
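Each line of `results.jsonl` is a standalone JSON record with a `key`, a `dataset` name, and a `metrics` dict whose `main_metric` field holds the headline number (it is null for the FairFace and UTKFace fairness entries). A minimal sketch for tabulating those main metrics, assuming only the schema visible above and a local copy of the file:

```python
# Minimal sketch: read results.jsonl and report each dataset's main metric.
import json

rows = []
with open("medium_scale/results.jsonl") as f:
    for line in f:
        rec = json.loads(line)  # json.loads tolerates the NaN values in some records
        rows.append((rec["dataset"], rec["metrics"]["main_metric"]))

for dataset, metric in rows:
    print(f"{dataset:25s} {metric if metric is not None else 'n/a'}")

# Average over the entries that define a main metric (FairFace/UTKFace are null).
scored = [m for _, m in rows if m is not None]
print("mean main_metric:", sum(scored) / len(scored))
```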
medium_scale/samples/0.2_CLIPWEI_0.5_N24.01.npy
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bd2c2dd80454136f4d4765531fd28c1de272e8ce83d018fe30d86283584fedc5
size 384198336
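As with the checkpoint, the `.npy` under `medium_scale/samples/` is committed as an LFS pointer for a roughly 384 MB array. A minimal sketch for opening it once the real file has been pulled from LFS; memory-mapping is only a suggestion for keeping memory use low, and nothing about the array's shape or dtype is assumed here:

```python
# Minimal sketch: open the LFS-backed sample array without loading it fully.
# Shape and dtype are read from the .npy header, not assumed in advance.
import numpy as np

samples = np.load(
    "medium_scale/samples/0.2_CLIPWEI_0.5_N24.01.npy",
    mmap_mode="r",  # memory-map the ~384 MB array instead of reading it all
)
print(samples.shape, samples.dtype)
```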