Languages: English · Size: < 1K · Libraries: Datasets

asahi417 committed · Commit 215507d · 1 Parent(s): f0255fd
experiments/analysis/flan_ul2_additional_analysis/comp.csv DELETED
@@ -1,90 +0,0 @@
- pairs,score_fewshot,score_zeroshot,score_true,rank_fewshot,rank_zeroshot,rank_true
- "['Jeremy Corbyn', 'Keir Starmer']",2.254284692564132,1.4222025149293618,4.142857074737549,59,58,37
- "['EuroBasket 2022', 'Murad III']",4.4401195005779455,2.3454037993300583,1.2857142686843872,78,76,84
- "['Bugatti', 'Lamborghini']",1.4160010444318634,1.1581703913669972,2.857142925262451,31,29,60
- "['Apple', 'Microsoft']",1.2913670974598686,1.116980313466796,4.857142925262451,16,20,11
- "['Lee Chong Wei', 'Lin Dan']",1.63429984614617,1.2366011532822951,4.142857074737549,40,37,37
- "['Microsoft', 'Google']",1.1447724456222377,1.0906696582667166,4.4285712242126465,8,15,24
- "['Samsung', 'Pitbull']",4.140153957741774,2.8062127386926976,1.2857142686843872,74,81,84
- "['Cristiano Ronaldo', 'Lionel Messi']",1.3742245807821651,1.076442896033145,4.857142925262451,28,14,11
- "['Germany', 'Austria']",2.2091957310404364,1.1516096930453659,2.2857143878936768,58,27,67
- "['Dyson', 'Dualit']",1.8600962574891424,1.4066400229807077,2.7142856121063232,47,57,64
- "['Netflix', 'Disney Plus']",1.3845683515726805,1.233603087311407,4.5714287757873535,30,36,19
- "['PyTorch', 'TensorFlow']",1.3366302997102721,1.2808253965315133,4.4285712242126465,20,44,24
- "['Dell', 'HP']",1.1219571043441705,1.0416585847044884,5.0,5,3,3
- "['Sanpellegrino', 'Volvic']",1.338617055933703,1.3447474634198389,3.5714285373687744,21,52,51
- "['Mikhail Khodorkovsky', 'Hezbollah']",3.7707901731903886,2.4170384926741626,1.4285714626312256,73,78,77
- "['British Airways', 'Aer Lingus']",1.270211940245148,1.1037129775740366,4.142857074737549,14,18,37
- "['Microsoft Teams', 'Slack']",1.164694234892354,1.0557504472617725,4.857142925262451,9,11,11
- "['Federal Reserve Board', 'Bank of England']",2.578587076949289,1.8141170517333125,2.857142925262451,61,69,60
- "['American Psycho', 'Chihuahua']",4.464672718711256,1.9418134795261126,1.2857142686843872,79,71,84
- "['Mars', 'Snickers']",1.8501941549035406,1.4256724727393357,3.4285714626312256,46,59,54
- "['ASML', 'LAM Research']",1.7783270160683868,1.8641223203372959,4.142857074737549,44,70,37
- "['Jake Paul', 'Tangled']",4.551227932548134,2.396226404066843,1.2857142686843872,80,77,84
- "['Nikhita Khrushchev', 'Leonid Brezhnev']",3.266073908627386,1.4714526914342227,3.4285714626312256,71,61,54
- "['Razer', 'Dell']",1.2391593524126838,1.0495182657911724,4.142857074737549,12,9,37
- "['Spotify', 'Apple']",1.3442945939889763,1.1535822949864265,4.0,22,28,43
- "['Kourtney Kardashian', 'Jenna Fischer']",4.280694537598255,2.1953277578209183,2.142857074737549,76,74,69
- "['Twitter', 'WhatsApp']",1.6244904643929405,1.4379288274666722,3.0,39,60,58
- "['Manchester United', 'Arsenal']",1.3605588591560613,1.0723828530634452,4.142857074737549,26,13,37
- "['WeChat', 'WhatsApp']",1.484841699252194,1.2416368997951348,4.4285712242126465,33,38,24
- "['Saudi Arabia', 'Israel']",1.6174798554892467,1.1452641071432148,4.285714149475098,38,24,29
- "['Bashar al-Assad', 'Christianity']",3.550410604642534,2.1672732394398393,1.2857142686843872,72,73,84
- "['Didier Deschamps', 'Scott Adkins']",2.867155455724534,1.7735129268342826,1.4285714626312256,66,68,77
- "['Lionel Messi', 'Kylian Mbappé']",1.7657343743251988,1.2661665197380474,2.857142925262451,43,43,60
- "['Louis Philippe', 'Peter England']",2.19019561111919,1.351565809742223,2.0,56,53,71
- "['Steve Jobs', 'Atlanta']",5.904232434605679,3.540538692893887,1.2857142686843872,85,88,84
- "['Bella Hadid', 'Choi Woo-shik']",5.338253953051818,2.6104166628925474,1.4285714626312256,83,80,77
- "['Mali', 'Frances McDormand']",4.69652347063031,2.884401969312326,1.2857142686843872,81,83,84
- "['Coca-Cola Company', 'Pepsi']",1.181473897259919,1.0438806211708804,5.0,10,5,3
- "['Khabib Nurmagomedov', 'Conor McGregor']",1.1193633368491687,1.0354933291081991,4.857142925262451,4,1,11
- "['Twitter', 'Facebook']",1.3590241391406035,1.1952468266881626,4.285714149475098,25,33,29
- "['Cardiff University', 'Swansea University']",1.7168344574402135,1.203303933183536,4.142857074737549,42,34,37
- "['Isaac Newton', 'Gottfried Leibniz']",2.6001425931445246,1.3553361325661701,4.285714149475098,62,54,29
- "['Casio', 'Texas Instruments']",1.5263560718426743,1.1835395036039162,4.142857074737549,36,32,37
- "['Arsenal', 'Tottenham Hotspur']",1.4652092020840817,1.0591720732323784,4.857142925262451,32,12,11
- "['Nintendo', 'Xbox']",1.2938302987723913,1.0998882844596016,4.285714149475098,17,17,29
- "['H&M', 'Zalora']",1.5132357286656546,1.359639423903739,4.0,35,55,43
- "['Serena Williams', 'Andy Murray']",1.357703886660535,1.0459443108429878,2.2857143878936768,23,7,67
- "['Liverpool FC', 'Manchester United']",1.2320421654673464,1.0449462333912818,5.0,11,6,3
- "['Apple', 'Samsung']",1.1382160153053889,1.04769222313876,4.857142925262451,7,8,11
- "['Expedia', 'Trivago']",1.2754892404597642,1.1483506089912217,2.857142925262451,15,26,60
- "['Heathrow Airport', 'Gatwick Airport']",1.310057344200426,1.1256792359957808,3.857142925262451,18,21,46
- "['Mario', 'Bowser']",2.1051866586258243,1.2529699295998475,4.5714287757873535,52,41,19
- "['US', 'China']",2.1978322480965753,1.2532725062626966,4.857142925262451,57,42,11
- "['Olympic Games', 'Helicobacter pylori']",6.8756513675674,3.3511833150794743,1.2857142686843872,87,86,84
- "['BMW', 'Mercedes-Benz']",1.1019890189307078,1.036298820976043,4.857142925262451,1,2,11
- "['Blur', 'Oasis']",2.109694036558229,1.3052386699396863,4.4285712242126465,53,46,24
- "['Israel', 'Palestine']",1.5110198418932492,1.1457000668046726,5.0,34,25,3
- "['Toshiba', 'LG']",1.3579084816318956,1.163162197987383,4.4285712242126465,24,30,24
- "['Apple', 'Rolex']",2.117420121610843,1.55752033597564,2.7142856121063232,54,66,64
- "['Tesla', 'Skoda']",1.5706529356751258,1.4840326440929967,3.4285714626312256,37,63,54
- "['Thomas Jefferson', 'Alexander Hamilton']",2.669107721335398,1.322868915838166,4.142857074737549,63,49,37
- "['Chester FC', 'Wrexham FC']",1.3745051107817812,1.1432810093172319,4.5714287757873535,29,23,19
- "['Line of Duty', 'CSI']",2.990469070715555,1.517484179767383,3.7142856121063232,69,65,48
- "['Gladiator', 'Imelda Staunton']",5.403125729441755,3.4826074368903637,1.2857142686843872,84,87,84
- "['UK', 'France']",2.975482406888533,1.3125181354740607,2.5714285373687744,68,48,66
- "['Nike', 'Adidas']",1.1153827300638572,1.0528324898144443,4.857142925262451,2,10,11
- "['Alain Prost', 'Ayrton Senna']",1.3662056774325724,1.1037874996569803,4.285714149475098,27,19,29
- "['Manchester City', 'Manchester United']",1.1175792528145987,1.042690282562067,4.857142925262451,3,4,11
- "['BBC', 'The Guardian']",1.9134159077022395,1.3234590754937028,3.7142856121063232,49,50,48
- "['Amazon', 'Ebay']",1.2634007233737188,1.1408217207337337,4.5714287757873535,13,22,19
- "['Sir Alex Ferguson', 'Jose Mourinho']",1.7097426451365767,1.3362973630326773,3.2857143878936768,41,51,56
- "['ASEAN', 'Helen Hunt']",6.710575637440718,3.295957018142525,1.2857142686843872,86,85,84
- "['Hans Zimmer', 'John Williams']",2.130025039273922,1.245920637785629,3.2857143878936768,55,39,56
- "['Noel Gallagher', 'Liam Gallaguer']",1.959691514461484,1.2467042808942606,4.0,50,40,43
- "[""McDonald's"", 'Burger King']",1.1362206561310968,1.0334133820827456,5.0,6,0,3
- "['Neoclassicism', 'Romanticism']",2.8311254802716017,1.3743393007605533,3.857142925262451,65,56,46
- "['Royal Feast', 'Fast X']",2.0789058943581598,1.309691642758208,1.5714285373687744,51,47,74
- "['Eminem', 'MGK']",1.8182500402863373,1.4744226711936121,4.285714149475098,45,62,29
- "['Sprite', '7 Up']",1.067041902759356,1.212745195228596,5.0,0,35,3
- "['Katharine Hepburn', 'Abrahamic religion']",7.435596112890825,3.0833799818271994,2.0,88,84,71
- "['Martin Luther King Jr.', 'Malcolm X']",2.950741229891417,1.6853565210199075,3.5714285373687744,67,67,51
- "['Ligue 1', 'Hayley Atwell']",4.241591954181677,2.5542050524768216,1.2857142686843872,75,79,84
- "['Vikram', 'Coen brothers']",5.102705427087103,2.8249661298246354,1.5714285373687744,82,82,74
- "['Russia', 'China']",2.2552297511812522,1.3004875998162657,2.7142856121063232,60,45,64
- "['Mohamed Salah', 'Korea']",3.17013754561042,1.955964256984248,1.5714285373687744,70,72,74
- "['Kingston', 'Samsung']",1.898363886873327,1.1773256866493471,3.5714285373687744,48,31,51
- "['AWS', 'GCP']",1.328443870096703,1.0957580666384157,4.714285850524902,19,16,17
- "['Beatles', 'Rolling Stones']",2.720976296395675,1.512356439122218,4.0,64,64,43
- "['John Tyler', 'Whig Party']",4.36023707587642,2.3251733241314745,2.0,77,75,71
experiments/analysis/flan_ul2_additional_analysis/frie.csv DELETED
@@ -1,93 +0,0 @@
- pairs,score_fewshot,score_zeroshot,score_true,rank_fewshot,rank_zeroshot,rank_true
- "['Rishi Sunak', 'Leo Varadkar']",2.5072286233955254,1.5328972126014884,4.0,36,38,36
- "['Gondor', 'Rohan']",2.219667699120621,1.3200803632378275,4.285714149475098,27,10,24
- "['FIFA', 'UEFA']",1.8393312611079873,1.577544267205364,3.7142856121063232,6,46,46
- "['Joseph Stalin', 'Josip Broz Tito']",4.4628113646659155,1.7933325159421711,1.8571428060531616,73,57,74
- "['Hillary Clinton', 'Barack Obama']",1.9927435787541081,1.3201421701192426,4.285714149475098,13,11,24
- "['Di Maio', 'Salvini']",2.767160074374971,1.337838952235684,2.4285714626312256,46,13,66
- "['Yahoo', 'Yahoo Japan']",1.8279701757878253,1.3426315225376397,4.5714287757873535,4,14,12
- "['Armenia', 'Azerbaijan']",3.3910081672629055,2.019560002782508,1.2857142686843872,64,69,85
- "['Doja Cat', 'Anthony Albanese']",3.6203144729988717,2.285112189877014,1.2857142686843872,66,77,85
- "['Pedro Sánchez', 'Pablo Iglesias']",3.8441320471956386,1.5763126396850815,3.857142925262451,68,45,41
- "['Islamic State', 'Denys Prokopenko']",7.555810974206813,2.7434138522858,1.8571428060531616,91,84,74
- "['Brazil', 'India']",3.0876062265419786,1.5445218908976759,3.4285714626312256,53,42,54
- "['Extinction Rebellion', 'Greta Thunberg']",4.297444603945184,1.7988221620291822,4.714285850524902,72,58,8
- "['Sicily', 'Calabria']",2.14270818639152,1.7086696805448425,4.0,20,52,36
- "['Oman', 'Iran']",4.655388561859594,1.8344885935861346,3.4285714626312256,79,62,54
- "['Sony', 'ZEISS']",1.7508266335646165,1.4926095846618708,3.857142925262451,2,34,41
- "['FTX', 'Alameda Research']",2.7771603329340837,1.8468702122489273,4.714285850524902,47,64,8
- "['UK', 'Commonwealth']",2.286699100481261,1.3116195554924144,4.714285850524902,29,9,8
- "['Australia', 'New Zealand']",2.0897834071538433,1.1996322618593351,4.857142925262451,15,4,4
- "['Kylo Ren', 'Rey']",4.706397612009928,2.2496602704508817,3.857142925262451,82,76,41
- "['Anne Boleyn', 'Columbia Pictures']",5.904263403618469,3.1023535131377296,1.5714285373687744,88,89,77
- "['KGB', 'CIA']",4.205783551871509,1.9175389761631927,1.1428571939468384,71,66,90
- "['Rishi Sunak', 'Joe Biden']",2.4815011107892357,1.4530706574347767,4.285714149475098,34,32,24
- "['Quentin Tarantino', 'Edgar Wright']",2.118021967225266,1.5630134648523961,4.142857074737549,17,44,31
- "['Keir Starmer', 'Jeremy Corbyn']",2.510536919927856,1.6027956172293558,2.5714285373687744,38,48,65
- "['U.S.S.R.', 'East Germany']",3.1798063453250203,1.7490046671657096,4.5714287757873535,57,54,12
- "['Harry Potter', 'Severus Snape']",3.130258366103835,2.1374828361920906,3.2857143878936768,55,73,59
- "['Tata Motors', 'Jaguar']",2.0430720689925455,1.5440595393249203,4.714285850524902,14,41,8
- "['Kendall Jenner', 'Bergen']",4.046531803418078,2.107965967178699,1.2857142686843872,70,72,85
- "['Thomas Jefferson', 'Kid Cudi']",4.804395949063151,3.5699412974829463,1.4285714626312256,83,91,79
- "['South Korea', 'Japan']",3.281895820214294,1.358307886737753,3.2857143878936768,61,17,59
- "['Spain', 'Portugal']",2.7773444102348748,1.4030445224268684,4.5714287757873535,48,23,12
- "['Liam Gallagher', 'Noel Gallagher']",1.7954047243557467,1.1880720173877841,2.0,3,2,72
- "['France', 'Belgium']",2.68026465536471,1.263730412056352,5.0,43,6,1
- "['Turkish Airlines', 'All Nippon Airways']",1.8306939117170087,1.8020300914000285,3.4285714626312256,5,59,54
- "['Malaysia', 'Singapore']",3.2113783707043635,1.421034738582978,4.4285712242126465,59,28,17
- "['JP Morgan', 'Morgan Stanley']",2.3167926133225145,1.6147051197573883,2.7142856121063232,30,49,63
- "['Sophia Loren', 'Marlon Brando']",3.6334932429281146,1.5278672802298314,2.142857074737549,67,37,69
- "['J.R.R. Tolkien', 'C.S. Lewis']",2.1717632564794833,1.4046480007981625,4.4285712242126465,23,24,17
- "['China', 'North Korea']",4.635741070538269,1.9096033070427534,4.142857074737549,78,65,31
- "['Margaret Thatcher', 'Ronald Reagan']",2.507500623446675,1.3577940001482511,4.4285712242126465,37,16,17
- "['Eva Perón', 'Interpol']",4.584718925489622,3.0137673242361056,1.7142857313156128,77,87,76
- "['UK', 'Ireland']",2.895721576596437,1.3653722695428352,3.7142856121063232,52,18,46
- "['Singapore', 'Israel']",2.6640065695422295,1.4140880138313676,3.857142925262451,41,26,41
- "['Eastern Orthodoxy', 'Oriental Orthodoxy']",2.144459751788138,1.5438050427063432,3.5714285373687744,21,40,49
- "['India', 'US']",2.102631201814095,1.255014507349887,3.4285714626312256,16,5,54
- "['Ed Gein', 'Colonel Sanders']",6.531176872066986,3.4933812565349394,1.2857142686843872,90,90,85
- "['Beatles', 'Rolling Stones']",2.137642861486924,1.9865660915532268,3.4285714626312256,19,67,54
- "['Red Bull', 'GoPro']",1.8860712721093587,1.5047058185996207,4.4285712242126465,7,36,17
- "['HSBC', 'BlackRock']",2.886790634938107,2.0075052819565604,4.142857074737549,51,68,31
- "['Elsa', 'Anna']",2.73324673786207,1.3202414370035342,4.5714287757873535,45,12,12
- "['Macbeth', 'Banquo']",3.2268803106623203,1.594563451823357,2.7142856121063232,60,47,63
- "['Aznar', 'Bush']",2.502721310388531,1.3840087685711524,4.714285850524902,35,20,8
- "['Google', 'Samsung']",1.600535634162664,1.3997592031330308,2.142857074737549,0,22,69
- "['IMF', 'The World Bank']",2.1482203071166244,1.5407179864932719,4.285714149475098,22,39,24
- "['Nikon', 'Tokina']",1.891894533749897,1.8125038240195745,2.2857143878936768,8,61,67
- "['Walter White', 'Gus Fring']",2.704029449327095,1.4999948860022556,2.142857074737549,44,35,69
- "['Ron Weasley', 'Neville Longbottom']",2.47194408147064,1.10628288517074,4.285714149475098,32,0,24
- "['Darth Vader', 'Emperor Palpatine']",4.5518926031726545,2.2899855574798225,3.7142856121063232,76,78,46
- "['Coca-Cola', ""McDonald's""]",2.1347450248912994,1.8090831455918956,4.285714149475098,18,60,24
- "['Instagram', 'WhatsApp']",2.1926051144849423,1.6941523367602,4.142857074737549,25,51,31
- "['Noah Schnapp', 'Galatasaray S.K.']",4.544568142329219,2.8019273827454723,1.0,74,86,91
- "['US', 'Canada']",2.480156982211465,1.197777087535886,4.857142925262451,33,3,4
- "['Bob Marley', 'Abu Bakr']",5.859624246271322,2.649177577551091,1.2857142686843872,87,83,85
- "['Jeff Bezos', 'GitHub']",3.3031318636509273,2.800474123110106,2.142857074737549,62,85,69
- "['Hong Kong', 'HSBC']",2.5867981009933847,1.7907472995174216,3.5714285373687744,40,56,49
- "['United States', 'United Kingdom']",2.204378009757044,1.1452585949143625,5.0,26,1,1
- "['Porter Wagoner', 'Dolly Parton']",1.891955089900802,1.448369263557515,4.142857074737549,9,31,31
- "['Achilles', 'Jonathan Bailey']",3.304829422561598,2.091679459310318,1.2857142686843872,63,71,85
- "['Linus Sebastian', 'Marques Brownlee']",3.1908942558412865,1.4306724113989326,3.4285714626312256,58,30,54
- "['Catherine Zeta-Jones', 'Johnny Knoxville']",5.361291092732239,2.3032680576640763,1.8571428060531616,86,79,74
- "['Amazon', 'Royal Mail']",3.1234512057801505,1.8436059753752663,3.0,54,63,61
- "['The Beatles', 'Queen']",2.6666825236320864,2.535800405044514,3.5714285373687744,42,81,49
- "['Benedict Cumberbatch', 'Hanukkah']",4.5461660579119,2.587972950419903,1.4285714626312256,75,82,79
- "['Huawei', 'China']",3.866617742324421,1.6263823578951324,4.857142925262451,69,50,4
- "['Rishi Sunak', 'Emmanuel Macron']",2.233551767998682,1.4175925450471027,4.0,28,27,36
- "['Microsoft', 'LinkedIn']",1.7256728699669188,1.4224206869370313,4.4285712242126465,1,29,17
- "['Paul Rudd', 'Memento']",4.826123857746366,2.2146243768099922,1.2857142686843872,84,75,85
- "['Russia', 'Georgia']",4.686735971876615,2.189183201168643,1.5714285373687744,80,74,77
- "['Germany', 'France']",3.1350047721376497,1.408355684941454,4.0,56,25,36
- "['Stephen Hawking', 'Brian Cox']",2.1910865064702514,1.481281276570047,3.857142925262451,24,33,41
- "['Jean-Michel Basquiat', 'Andy Warhol']",2.825662976760519,1.387763339914243,4.0,49,21,36
- "['Mark Drakeford', 'Rishi Sunak']",2.850747712232016,1.3819595848830297,3.0,50,19,61
- "['Jürgen Klopp', 'Exo']",3.4804252140185947,2.0334641521139707,1.0,65,70,91
- "['Windows', 'Xbox']",1.9842002773486025,1.3096745859497074,4.285714149475098,12,8,24
- "['Saturn', 'Rachel Bilson']",4.694825694210988,2.3856047480335407,1.2857142686843872,81,80,85
- "['Ottoman Empire', 'Snowpiercer']",6.0673352599523485,3.094959464267752,1.2857142686843872,89,88,85
- "['Johnny Cash', 'Waylon Jennings']",1.945452892566945,1.2764023623156513,4.285714149475098,11,7,24
- "['UN', 'NATO']",1.943108938657291,1.5479037084149063,4.4285712242126465,10,43,17
- "['Boris Johnson', 'Emmanuel Macron']",2.3814063555318072,1.357574493019605,3.2857143878936768,31,15,59
- "['Cersei Lannister', 'Euron Greyjoy']",4.890130547020662,1.789102919795851,3.857142925262451,85,55,41
- "['Japan', 'Taiwan']",2.5580482576144874,1.7482978988801516,3.4285714626312256,39,53,54
experiments/analysis/flan_ul2_additional_analysis/infl.csv DELETED
@@ -1,94 +0,0 @@
- pairs,score_fewshot,score_zeroshot,score_true,rank_fewshot,rank_zeroshot,rank_true
- "['Prince Harry', 'Monarchy']",2.206237453023438,1.4828068514166197,4.857142925262451,59,65,3
- "['F1', 'social media']",1.6962462246989474,1.4783565184427614,2.857142925262451,38,62,69
- "['Elton John', 'Elvis Presley']",1.1771033084529128,1.2207688454317978,4.285714149475098,6,5,29
- "['Jack Black', 'Waffen-SS']",2.6838348668032173,1.6618920512992446,1.1428571939468384,76,84,90
- "['Game of Thrones', 'Lord of the Rings']",1.5128835998361514,1.3037945218604563,4.4285712242126465,28,27,21
- "['Canon', 'Nikon']",1.5171157800616262,1.499901101345442,4.142857074737549,29,69,36
- "['Thomas Aquinas', 'Aristotle']",1.4434779088253025,1.2786954208798134,4.5714287757873535,25,21,13
- "['Android', 'iOS']",1.605213878304682,1.383687906634991,4.142857074737549,32,43,36
- "['Charles Baudelaire', 'Tornado MRCA']",3.4752121817587165,2.2726535736087294,1.2857142686843872,86,92,86
- "['trending music', 'TikTok']",3.1950195245345134,1.9175394333401652,4.714285850524902,82,87,7
- "['Xi Jinping', 'Kim Jong-il']",2.4402362308482393,1.4616549945990829,3.2857143878936768,64,60,62
- "['Beethoven', 'romanticism']",1.2263295243780155,1.3262804176032572,3.2857143878936768,12,32,62
- "['Edgar Allan Poe', 'Romanticism']",1.4185346403400776,1.3348436344608527,4.4285712242126465,22,34,21
- "['US', 'NASA']",2.118110339831737,1.4853929568657513,4.0,57,66,44
- "['Wales', 'Westminster']",2.782460410565209,1.4885316048811115,4.285714149475098,78,67,29
- "['Sierra Leone', 'Pulp fiction']",3.5934069934903454,1.5544310550489477,1.2857142686843872,87,72,86
- "['Ethereum', 'Bitcoin']",1.8874822034491636,1.2953061815438591,4.857142925262451,48,23,3
- "['Theresa May', 'David Cameron']",2.949204809089442,1.586599476418079,4.4285712242126465,79,76,21
- "['Anna Delvey', 'Bernie Madoff']",5.623275964002472,2.2394238525833763,3.4285714626312256,92,91,57
- "['Lord of the Rings', 'Beowulf']",1.258581279473116,1.2980656118986473,4.5714287757873535,14,25,13
- "['Andrew Johnson', 'Abraham Lincoln']",1.9723178231973442,1.40498846198677,4.0,52,51,44
- "['LinkedIn', 'Facebook']",1.791620299521638,1.4175498334174674,3.2857143878936768,44,52,62
- "['Picasso', 'Cezanne']",1.1265124619892726,1.2479383152464008,4.5714287757873535,2,11,13
- "['Playstation', 'Xbox']",1.8027918921751096,1.3788052017388595,4.142857074737549,45,42,36
- "['Kevin Spacey', 'Tenerife']",3.031313168135999,1.9604880644970053,1.1428571939468384,80,89,90
- "['Saudi Arabia', 'US']",2.093903574994278,1.2916515768834047,4.0,56,22,44
- "['Fitbit', 'heart rate monitor']",2.606259421042801,1.6164861808299595,4.0,73,80,44
- "['Taiwan', 'China']",2.5733032119077044,1.5468607584026417,4.5714287757873535,72,71,13
- "['Kylian Mbappe', 'Cristiano Ronaldo']",1.3462659406923982,1.3997548229520052,4.5714287757873535,17,48,13
- "['Picasso', 'cubism']",1.1808883481916752,1.2655718661677833,3.857142925262451,7,17,50
- "['Miley Cyrus', 'Lorde']",2.728881194862877,1.4807292942898764,3.4285714626312256,77,64,57
- "['Neymar', 'Alexis Sánchez']",1.4324063715617659,1.3969767151825936,3.5714285373687744,24,47,54
- "['Smashing Pumpkins', 'Beatles']",1.1954389468878004,1.1792361630004708,4.0,9,0,44
- "['Portuguese', 'The Prestige']",2.528953253519461,1.9433309750579217,1.7142857313156128,69,88,78
- "['Luke Evans', 'Ava Gardner']",2.3254817098444387,1.4520768040323637,1.4285714626312256,61,59,81
- "['MacOS', 'Linux']",1.4525116556793338,1.332216987202794,4.0,27,33,44
- "['Transport for Wales', 'National Rail']",2.6428727501051643,1.6550213721066336,4.142857074737549,74,83,36
- "['Herbie Hancock', 'Miles Davis']",1.236797709077148,1.1964928260606924,4.5714287757873535,13,1,13
- "['India', 'Hinduism']",1.955558701368723,1.2196591792656795,4.857142925262451,50,4,3
- "['hip hop', 'jazz']",1.2144741523280185,1.2047165654123286,3.7142856121063232,11,2,52
- "['Bob Dylan', 'Woody Guthrie']",1.1375488736089192,1.2326184562750893,4.285714149475098,4,7,29
- "['Tim Burton', 'German expressionism']",1.6186361198175159,1.369226360059585,4.142857074737549,33,40,36
- "['Eamon de Valera', 'James Napper Tandy']",3.6175586205923973,1.443439450357,2.5714285373687744,88,58,76
- "['Joe Biden', 'Donald Trump']",2.5705219277419085,1.4317740304131183,3.142857074737549,71,55,67
- "[""Shaquille O'Neal"", 'Selim II']",2.4778299176391325,1.4394651784951111,1.2857142686843872,66,57,86
- "['Cobra Kai', 'Anna Chlumsky']",2.534315382245363,1.977163565115634,1.2857142686843872,70,90,86
- "['Harry Potter', 'Wizard of Oz']",1.3981003042913704,1.2769913155132229,2.7142856121063232,21,20,73
- "['Coca-Cola', 'Pepsi']",1.7056819331589494,1.3923309535278097,4.285714149475098,39,46,29
- "['Apple Music', 'Spotify']",1.8163653811137668,1.576047707286359,4.285714149475098,46,74,29
- "['Singaporean food', 'Malaysian food']",1.9713104797427043,1.3594139059930275,3.142857074737549,51,38,67
- "['Guess', 'Gucci']",1.8272982617030182,1.3498799842494749,3.2857143878936768,47,36,62
- "['Commonwealth', 'United Kingdom']",2.089215860680506,1.317790593316562,4.5714287757873535,55,30,13
- "['machine learning', 'Google']",1.543740723498369,1.470958422894975,4.0,30,61,44
- "['Antonio Salazar', 'fascism']",2.680113370491548,1.6184086883675441,3.2857143878936768,75,81,62
- "['Facebook', 'LinkedIn']",1.6300699591575025,1.4374315972141112,2.857142925262451,34,56,69
- "['Hank Williams', 'Jimmie Rodgers']",2.4118345062406004,1.297683920797394,3.857142925262451,63,24,50
- "['painters', 'Stable Diffusion']",2.51175947425801,1.5677620010100897,2.7142856121063232,68,73,73
- "['Walmart', 'Rivaldo']",3.8841022284955686,1.71497201525277,1.2857142686843872,91,85,86
- "['Sauron', 'Shiba Inu']",3.8095395194912576,1.6006748278781522,1.2857142686843872,90,79,86
- "['Bruno Mars', 'James Brown']",1.731650925843459,1.3405051681301698,4.4285712242126465,40,35,21
- "['Brazil', 'Spain']",1.649395943907092,1.365578101472138,2.7142856121063232,35,39,73
- "['heavy metal', 'punk music']",1.1580766665070112,1.2675698013535046,3.142857074737549,5,18,67
- "['Moon', 'Göbekli Tepe']",3.2690171349998085,1.8167444522743377,1.0,83,86,92
- "['English', 'William Shakespeare']",1.7492749009346042,1.3537742603597003,4.4285712242126465,42,37,21
- "['Beatles', 'Alice in Wonderland']",1.1214706356719988,1.2127665918503285,3.7142856121063232,0,3,52
- "['impressionism', 'Edouard Manet']",1.426646255753299,1.2426143845861817,4.714285850524902,23,10,7
- "['Gilbert Gottfried', 'Mike Krieger']",3.2947400117472725,1.5883290396723415,1.2857142686843872,84,77,86
- "['Vladimir Lenin', 'chess']",2.018240039235421,1.6468579185886547,3.4285714626312256,53,82,57
- "['Pepsi', 'Coca-Cola']",1.5754848268216837,1.3216048983146913,4.285714149475098,31,31,29
- "['Alicia Vikander', 'Richard Attenborough']",2.3518508531775946,1.4914094205132749,1.5714285373687744,62,68,79
- "['Luke Bryan', 'Hank Williams']",2.070155943604063,1.369560235671437,2.5714285373687744,54,41,76
- "['Bill Gates', 'Steve Jobs']",1.2081641177318276,1.2628213877198153,3.2857143878936768,10,16,62
- "['Allu Arjun', 'Aaron Ramsey']",3.0687671520089728,1.5093072142863981,1.0,81,70,92
- "['Stephen King', 'Arthur Machen']",1.4464092702112175,1.3031646611706496,4.285714149475098,26,26,29
- "['Bangladesh', 'India']",3.3210378134420866,1.594239101732205,4.0,85,78,44
- "['hamburger', 'Germany']",2.2905567186541202,1.4805603774557137,2.7142856121063232,60,63,73
- "['Plato', 'Socrates']",1.3858528562501022,1.2252417514229652,5.0,19,6,1
- "['Messi', 'Maradona']",1.351277719040998,1.386407854799256,4.714285850524902,18,45,7
- "['Oasis', 'Blur']",1.2649262407605837,1.4267974980604674,3.4285714626312256,16,54,57
- "['Quentin Tarantino', 'Sergio Leone']",1.1819027001246791,1.2378902814139754,4.4285712242126465,8,9,21
- "['European Union', 'Germany']",2.155015437978733,1.3075359721381155,4.4285712242126465,58,28,21
- "['Stephen Foster', 'Thomas Moore']",2.4455984338240087,1.4035468401012576,2.7142856121063232,65,49,73
- "['Hoover', 'Dyson']",1.6885925695480237,1.2489516544535164,4.285714149475098,37,12,29
- "['Mark Rothko', 'Claude Monet']",1.3961933573752359,1.2531560156817456,3.4285714626312256,20,13,57
- "['James Brown', 'Michael Jackson']",1.2634985666005347,1.23633902308324,1.5714285373687744,15,8,79
- "['Windows', 'Linux']",1.7759159585312407,1.3146882104890258,4.0,43,29,44
- "['Radiohead', 'David Bowie']",1.1237945956266344,1.2615418250993753,4.285714149475098,1,15,29
- "['NVIDIA', 'AMD']",1.73533381213156,1.404255266699919,4.142857074737549,41,50,36
- "['India', 'Gandhi']",1.947103791346796,1.385347953173187,4.714285850524902,49,44,7
- "['Viktor Yushchenko', 'Bonnie Wright']",3.6785388656306246,1.5775493447776812,1.4285714626312256,89,75,81
- "['Beethoven', 'Mozart']",1.1355583896963073,1.2548317540889342,4.5714287757873535,3,14,13
- "['Bitcoin', 'blockchain']",1.6690348677283227,1.2696348732034277,3.857142925262451,36,19,50
- "['Hong Kong', 'China']",2.5037429119478887,1.4266732545397631,4.857142925262451,67,53,3
experiments/analysis/flan_ul2_additional_analysis/know.csv DELETED
@@ -1,109 +0,0 @@
- pairs,score_fewshot,score_zeroshot,score_true,rank_fewshot,rank_zeroshot,rank_true
- "['Hawaii', 'ukulele']",2.217062208651209,1.6141553237135906,4.4285712242126465,41,50,35
- "['Paris', 'Eiffel Tower']",1.9805088006095575,1.549073112637212,5.0,28,37,3
- "['Sweden', 'pop']",2.5156100347776227,1.599112336564866,3.4285714626312256,63,49,64
- "['Memphis Depay', 'Mendelian inheritance']",4.4230163252838945,2.7539158748346066,1.0,105,105,103
- "['France', 'beaches']",1.749754689567146,1.7211045184289022,2.2857143878936768,13,68,80
- "['France', 'cars']",1.8686154736316878,1.577197902525235,3.142857074737549,19,44,70
- "['Alphabet Inc.', 'Google']",2.0847632937831126,1.4982438958353133,5.0,30,28,3
- "['Vincent Van Gogh', 'The Starry Night']",2.3775528885917723,1.4038491705190446,4.5714287757873535,54,9,26
- "['Italy', 'Hawaiian pizza']",2.3425170924645218,1.6829345832720786,1.2857142686843872,50,62,95
- "['Japan', 'glass product']",2.1269027324918053,1.6285215315723371,2.5714285373687744,34,53,75
- "['France', 'rococo movement']",2.3338117783895433,1.7522039622068803,3.0,49,70,72
- "['Europe', 'The Final Countdown']",3.602069155411361,1.4387832395341067,4.5714287757873535,103,18,26
- "['Sophie Turner', 'Sylvia Plath']",2.358691955426627,1.5106470237582166,1.2857142686843872,52,32,95
- "['Bill Nye', 'scientist']",1.637630581262456,1.3207424886520618,4.4285712242126465,5,0,35
- "['Mercedes Benz', 'build quality']",2.1059498341351617,1.5617058192446298,3.857142925262451,31,42,57
- "['Hawaii', 'beaches']",1.659939013450344,1.5151668359092634,4.857142925262451,7,33,9
- "['Jackson Pollock', 'action painting']",2.13208061424068,1.446368405963585,4.4285712242126465,35,19,35
- "['Andhra Pradesh', 'Martinique']",3.079962763598426,1.9438218153226388,1.0,90,90,103
- "['Keanu Reeves', 'The Matrix']",2.4481907053911445,1.3843872585630363,4.857142925262451,58,5,9
- "['Christopher Columbus', 'India']",2.4675038773077578,2.201138053250345,1.7142857313156128,60,99,88
- "['Picasso', 'Guernica']",2.531926183685954,1.5519262551361566,4.857142925262451,65,39,9
- "['Italy', 'Renaissance']",2.1476788776698,1.5369287637424967,4.4285712242126465,36,34,35
- "['Korea', 'Breakdance']",3.4438237125203486,1.8628185631970635,2.7142856121063232,97,84,73
- "['Spain', 'olive oil']",1.670210365590026,1.5490954571308952,4.5714287757873535,8,38,26
- "['C.S. Lewis', 'The Screwtape Letters']",1.890430724934506,1.4756095615869196,4.4285712242126465,22,24,35
- "['Corsica', 'Napoleon Bonaparte']",2.4831138426360715,1.9022471325455272,4.4285712242126465,62,87,35
- "['Rafael Nadal', 'Ralph Macchio']",3.396177477916396,1.647891184288785,1.0,96,58,103
- "['Walther P38', 'Lupin the Third']",4.513619538352889,2.220320178652019,3.4285714626312256,106,101,64
- "['Richard Harris', 'Curcuma longa']",3.925878661487128,3.0051507411666667,1.0,104,107,103
- "['Smashing Pumpkins', 'Thru the eyes of Ruby']",2.701559478037864,1.617355203052018,3.5714285373687744,73,52,63
- "['India', 'rum']",2.782247470441063,2.0201010411950517,2.4285714626312256,79,92,78
- "['Parasite', 'Jersey']",4.665180668775107,2.0917107526710037,1.2857142686843872,107,96,95
- "['India', 'Gurkhas']",1.891718176412827,1.7755981109002028,2.7142856121063232,23,74,73
- "['Amazon', 'Alexa']",2.3646542660476477,1.5899399391175666,4.142857074737549,53,46,46
- "['Thomas Edison', 'telephone']",1.6915911159454087,1.4076575234169095,3.7142856121063232,10,10,60
- "['Buffon', 'PSG']",3.486656345031013,1.7530930722481544,2.4285714626312256,100,71,78
- "['Alfred Hitchcock', 'horror']",1.7571665259988574,1.3663635836010108,4.142857074737549,15,3,46
- "['William Shakespeare', 'Romeo and Juliet']",1.8295331567106516,1.5081249508332475,4.714285850524902,17,31,17
- "['Thomas Edison', 'light bulb']",1.7418718493081657,1.3653392286358037,4.857142925262451,11,2,9
- "['Michael Jordan', 'Tessa Thompson']",2.1705268702176226,1.5543171907584603,1.8571428060531616,38,40,86
- "['Inglourious Basterds', 'Sergio Busquets']",3.318331366597315,2.1133903510632104,1.0,94,98,103
- "['Boris Johnson', 'Brexit']",1.645082124709204,1.5863673272495546,4.4285712242126465,6,45,35
- "['Georgia', 'Joseph Stalin']",1.967571912844332,1.8488188109955903,3.7142856121063232,27,81,60
- "['China', 'coffee']",2.17544980131227,1.8135234081399012,2.142857074737549,39,80,82
- "['OpenAI', 'ChatGPT']",2.9218799850556576,2.0160923267589936,4.5714287757873535,85,91,26
- "['Tesco', 'groceries']",2.64246557290908,1.4777211558218561,5.0,71,25,3
- "['Amazon', 'cloud computing']",2.778278227603094,1.9068170630915215,3.2857143878936768,78,88,67
- "['Nvidia', 'GPUs']",2.316988987900782,1.4512691228933783,4.714285850524902,48,20,17
- "['Tony Blair', 'Iraq War']",1.6255000076210981,1.7821177773658414,4.285714149475098,4,77,42
- "['Germany', 'techno music']",1.557046019082083,1.629043158489762,3.7142856121063232,3,54,60
- "['Michelangelo', 'Pop Art']",1.914005743262575,1.8118542261041621,2.0,25,79,84
- "['Gilmore Girls', 'OpenAI']",3.4608540643783776,2.1065435226515503,1.0,99,97,103
- "['Ragnarök', 'Little Boy']",3.3044717204594134,1.686534969515163,1.5714285373687744,93,63,89
- "['Pizzagate', 'Hillary Clinton']",2.1153132432461224,1.7795214681654785,3.857142925262451,32,76,57
- "['George Orwell', 'Coming Up for Air']",2.6697120181771408,2.038484059003444,4.0,72,94,52
- "['Italy', 'wine']",1.5255845922312388,1.4373376976319032,4.0,2,16,52
- "['Coca-Cola', 'Pepsi']",2.7243095716470473,1.564914630149368,1.4285714626312256,74,43,91
- "['Spotify', 'Podcasts']",2.225374084336068,1.778620332709591,3.7142856121063232,43,75,60
- "['Valencia', 'paella']",2.2381111940650604,1.6345744732560765,4.5714287757873535,46,56,26
- "['The Office', 'IBM']",2.6144005456289228,1.6322135453268423,1.4285714626312256,70,55,91
- "['Romania', 'Roman Catholicism']",1.7420108747270733,1.4311110891664438,2.142857074737549,12,15,82
- "['George Washington', 'Kiribati']",2.7662484606094733,2.031883419437793,1.0,77,93,103
- "['Charles Bronson', 'Rory McIlroy']",3.372548862460059,1.8027123773986695,1.0,95,78,103
- "['Belgium', 'wine']",1.492993613315117,1.5421924887285943,2.4285714626312256,1,35,78
- "['Luka Modrić', 'Rottweiler']",3.5986081367767677,2.397671526907933,1.0,102,103,103
- "['Switzerland', 'mountains']",2.217240085924926,1.6930428362085401,4.714285850524902,42,65,17
- "[""Assassin's Creed"", 'history']",2.480178565321259,1.6465573298565803,3.2857143878936768,61,57,67
- "['Red Bull', 'energy drinks']",2.555565396718211,1.456624393896625,4.857142925262451,68,21,9
- "['Johnny Cash', 'Ring of Fire']",2.895723647778649,1.6823023432944961,4.714285850524902,84,61,17
- "['Microsoft', 'Xbox']",2.161852035473677,1.4025903288235062,4.285714149475098,37,8,42
- "['Canada', 'maple syrup']",1.8791379262254932,1.558050888058955,4.714285850524902,20,41,17
- "['France', 'cheese']",1.9032974601916173,1.5430241918791174,4.5714287757873535,24,36,26
- "['France', 'beer']",1.799265670988058,1.616702308516782,1.8571428060531616,16,51,86
- "['Nintendo', 'Super Mario Bros.']",2.522732299065512,1.4081862950465882,4.5714287757873535,64,11,26
- "['democracy', 'North Korea']",2.4453375211865613,1.874067353187238,1.0,57,85,103
- "['Apple', 'iPhone']",2.2376724784744693,1.504551474404591,4.857142925262451,45,30,9
- "['Harry Potter', 'Bloomsbury']",2.9940553869252637,1.681124247026932,3.857142925262451,87,60,57
- "['France', 'mountains']",2.5338323480997524,2.0420109399379687,3.2857143878936768,66,95,67
- "['UK', 'rain']",3.4868263466820824,1.8510534368673026,4.4285712242126465,101,82,35
- "['Matt Damon', ""Ocean's Eleven""]",2.850793590408457,1.710292734340407,4.285714149475098,81,67,42
- "['Switzerland', 'banking secrecy']",1.4136448220985323,1.4140492427045632,4.0,0,12,52
- "['Adidas', 'Yeezy Boost']",3.096664103973119,1.425090203853594,4.142857074737549,91,13,46
- "['Portugal', 'Fado']",1.888578072809666,1.4374283843077116,4.142857074737549,21,17,46
- "['Italy', 'tea']",2.9581045597178255,1.853469614720884,1.2857142686843872,86,83,95
- "['Beatles', 'Come Together']",2.200709602117659,1.3725785586556383,4.142857074737549,40,4,46
- "['Afro-Brazilians', 'Capoeira']",2.459139681663665,1.5942359184295292,4.142857074737549,59,47,46
- "['Steve Jobs', 'AirPods']",2.120365087557525,1.9190476817297606,1.4285714626312256,33,89,91
- "['Apple', 'Apple Watch']",2.4272660450931087,1.5014430337590525,4.0,55,29,52
- "['Frank Abagnale Jr', 'doctor']",2.7335836651943284,2.204312972088224,2.142857074737549,75,100,82
- "['Meta', 'Instagram']",3.1003940366119402,1.7586111048690642,4.0,92,73,52
- "['Jeff Goldblum', 'Jurassic Park']",3.063036730163065,1.472164987898691,4.4285712242126465,88,23,35
- "['Leonardo Da Vinci', 'Mona Lisa']",2.3521336157248,1.4838067466083005,5.0,51,26,3
- "['Neil Armstrong', 'Korean War']",2.8713334808494806,2.5360696094371646,1.4285714626312256,82,104,91
- "['France', 'baguette']",1.9159014965314902,1.6694015995141194,4.5714287757873535,26,59,26
- "['Queen', 'Bohemian Rhapsody']",2.894437728771924,1.4253497252771552,4.714285850524902,83,14,17
- "['Pixar', 'Novosibirsk']",2.788777406876352,2.9477767154494834,1.0,80,106,103
- "['Greggs', 'sausage rolls']",3.0708764950960226,1.7009085678770521,4.4285712242126465,89,66,35
- "['Japan', 'sake']",2.2355352210063355,1.7311980801105462,4.0,44,69,52
- "['IKEA', 'food']",2.540346699614517,2.3136462632326893,2.0,67,102,84
- "['William Grant & Sons', 'gin']",2.281484301404387,1.6904876254513095,3.142857074737549,47,64,70
- "['Netherlands', 'tulips']",1.7528172337780288,1.4584905157760355,4.714285850524902,14,22,17
- "['LAMY', 'notebook']",3.4530459984394613,1.887999111649344,3.142857074737549,98,86,70
- "['Harvey Weinstein', 'Miramax']",2.739748544697059,1.754414354986099,4.714285850524902,76,72,17
- "['Scotland', 'whisky']",1.681216536351356,1.399913185634903,4.714285850524902,9,7,17
- "['Apple', 'MacBook']",2.43129398966038,1.3878072634111522,4.5714287757873535,56,6,26
- "['Steve Jobs', 'Apple']",1.8603701274790823,1.495425847102713,5.0,18,27,3
- "['Beatles', 'I Me Mine']",2.57458625549266,1.5983103230707787,2.5714285373687744,69,48,75
- "['Google', 'search engine']",1.9884333243582761,1.355647107117722,4.857142925262451,29,1,9
experiments/analysis/flan_ul2_additional_analysis/simi.csv DELETED
@@ -1,94 +0,0 @@
- pairs,score_fewshot,score_zeroshot,score_true,rank_fewshot,rank_zeroshot,rank_true
- "['pill', 'tablet']",3.274008286278982,1.0963185810651779,4.857142925262451,23,0,10
- "['Eduardo Saverin', 'Guinea-Bissau']",19.392038302744776,4.052577211867362,1.0,90,89,90
- "['Dionysus', 'Toyota Corolla']",12.934971548863354,3.4564230570493915,1.1428571939468384,84,87,86
- "['Great Britian', 'British Empire']",3.652190726486786,1.272809779852253,4.5714287757873535,31,28,23
- "['English', 'Persian']",4.674226586789393,1.6148763949051825,2.0,61,57,70
- "['Chess', ""Rubik's Cube""]",4.116549612748183,1.879439692861986,2.142857074737549,47,67,68
- "['Star Wars', 'Star Trek']",2.628031851053477,1.4347979273272462,4.285714149475098,8,50,35
- "['Anne Heche', 'Ponzi scheme']",20.023976104666094,4.175371728372175,1.2857142686843872,91,90,83
- "['Chris Pine', 'Good Friday']",13.089133086102137,3.440924088108119,1.0,85,86,90
- "['Sudocrem', 'Germolene']",5.318910995704305,1.6453597895571181,4.4285712242126465,63,58,30
- "['Yugoslavia', 'Albania']",6.372893737106588,1.777307939350594,3.7142856121063232,71,62,56
- "['Beatles', 'Rolling Stones']",3.3544283622290196,1.1962706778662544,4.714285850524902,25,15,16
- "['decision tree', 'neural network']",2.9065665188216347,1.4315812594673885,2.857142925262451,16,49,64
- "['Suits', 'Law & Order']",3.800693314383067,1.3978820298258887,4.5714287757873535,35,46,23
- "['Titanic', 'Superbad']",4.619188674370307,2.213662292000817,1.7142857313156128,59,78,73
- "['Seleucid Empire', 'Macedonian Empire']",6.419775995593114,1.828381638106454,3.7142856121063232,72,64,56
- "['Doctor Who', 'Timeless']",3.46088500696327,1.4524423096052965,3.7142856121063232,26,53,56
- "['Srebrenica massacre', 'Rock Hudson']",22.27626107039248,9.197721177121391,1.0,92,92,90
- "['Arnold Classic', 'Mr. Olympia']",4.487758083728249,1.428413776493121,4.142857074737549,55,48,41
- "['Italy', 'Superman']",10.978149906422772,2.573955623240689,1.0,82,82,90
- "['Alibaba', 'Amazon']",3.972773665711877,1.3082210072455742,4.4285712242126465,43,40,30
- "['Indiana Jones', 'Star Wars']",4.379989690008676,1.6919490885578246,3.2857143878936768,54,60,61
- "['NQ64', 'Wetherspoons']",4.132125854770325,2.154384332367406,2.5714285373687744,49,77,67
- "['Pepsi', 'Fanta']",4.184848046216775,1.492284581073927,3.4285714626312256,50,54,59
- "['Homebase', 'IKEA']",4.070942383407451,1.3055150387108365,4.714285850524902,44,38,16
- "['ramen', 'udon']",3.2656156801207405,1.3038570428318557,4.142857074737549,22,37,41
- "['England', 'Wales']",3.8088520249291715,1.284211292664953,4.0,36,32,46
- "['sphinx', 'sphynx']",1.6117722505862506,1.29134319803738,2.142857074737549,2,34,68
- "['iPod', 'iPad']",2.39534504274217,1.1745858450607467,3.4285714626312256,6,9,59
- "['Olympic Games', 'X Games']",2.9259438158318263,1.4087563252628894,3.857142925262451,17,47,51
- "[""McDonald's"", 'Burger King']",4.103140194258418,1.1845686476202482,5.0,46,12,3
- "['Minnesota', 'Wisconsin']",3.8835637723211662,1.3668624638306508,4.5714287757873535,40,43,23
- "['Slack', 'Microsoft Teams']",1.010722845182092,1.1083802086359376,4.714285850524902,0,1,16
- "['Peter Phillips', 'Christine Baranski']",5.8545935585758615,1.9770690531727926,1.5714285373687744,67,69,75
- "['Jenna Ortega', 'Anglo-Saxons']",16.313136929836602,4.34116604633945,1.0,87,91,90
- "['Karl Urban', 'France 24']",18.112881550801212,3.737063890873087,1.2857142686843872,89,88,83
- "['Sudan', 'South Sudan']",3.154078642930165,1.1413170766716667,3.857142925262451,21,3,51
- "['Gameboy', 'Nintendo']",2.6422439751681384,1.1561978345889523,2.7142856121063232,9,5,66
- "['Grammy Award', 'Novel Prize']",7.868011909588307,2.000009898204835,3.2857143878936768,76,70,61
- "['George Ezra', 'Lead Belly']",5.546488610761862,1.8679975384088594,3.857142925262451,64,66,51
- "['Cardiff', 'Swansea']",4.374596221358583,1.3684548520556212,4.4285712242126465,53,44,30
- "['Steve Jobs', 'Tim Cook']",2.7787270352070927,1.1722038716948548,3.857142925262451,13,8,51
- "['Counter Strike', 'Rainbow Six']",4.1002465293847585,1.6683302843699688,4.714285850524902,45,59,16
- "['Hawaii', 'Guam']",3.8173372239015793,1.3596249987134326,4.0,37,42,46
- "['Scrabble', 'Jenga']",3.6995412415920823,2.0206657104219063,2.857142925262451,32,71,64
- "['Christmas', 'Easter']",3.770784329523088,1.5852128544625168,3.857142925262451,34,56,51
- "['fusilli', 'rotini']",2.7562585800855093,1.2170543108656977,4.857142925262451,12,18,10
- "['Coachella', 'Woodstock']",2.6805485572072394,1.3026882918541969,4.142857074737549,10,36,41
- "['Avatar', 'Archimedes']",6.2921744604165974,2.0498386840332077,1.0,70,73,90
- "['Shark', 'Bush']",6.076433738970192,2.473817916476143,4.5714287757873535,68,79,23
- "['Eva Braun', 'Phil Jackson']",7.639720036725621,2.073420971334422,1.2857142686843872,74,75,83
- "['Coca-Cola', 'Pepsi']",3.287619058514129,1.1603703122935556,5.0,24,6,3
- "['Joe Burrow', 'Edward Scissorhands']",9.681085432794521,2.7072382051749715,1.4285714626312256,79,83,79
- "['Australia', 'New Zealand']",4.28015926788305,1.2436423214416585,4.857142925262451,51,25,10
- "['Edward I', 'William the Conqueror']",7.00715297830466,1.8079957937284559,4.142857074737549,73,63,41
- "['Frank Sinatra', 'Ella Fitzgerald']",4.323018875025666,1.2618269168231726,4.285714149475098,52,27,35
- "['New York', 'York']",2.0015508023469826,1.2378399248510765,1.5714285373687744,3,22,75
- "['Uzbekistan', 'United States']",7.986094222427021,1.8443678727708666,1.4285714626312256,77,65,79
- "['Red Bull', 'Monster Energy']",3.5782005144687097,1.2350180713452537,4.857142925262451,30,20,10
- "['Champions League', 'Europa League']",2.899351869843832,1.3311126253575225,4.714285850524902,15,41,16
- "['Cerave', 'Nivea']",7.860811825983757,1.7120814151917347,4.5714287757873535,75,61,23
- "['Galaxy', 'iPhone']",3.890342277335262,1.4420422620374922,4.5714287757873535,41,51,23
- "['Mehmet Öz', 'David Schwimmer']",10.394835233042148,2.5171469601925516,1.4285714626312256,81,81,79
- "['Disney', 'Pixar']",2.7475137480770297,1.1827246431069653,5.0,11,11,3
- "['Batman', 'Iron Man']",4.632936802687815,1.4505700925559022,4.285714149475098,60,52,35
- "['Gisele Bündchen', 'Orson Welles']",11.733988200838617,2.482540389029044,1.4285714626312256,83,80,79
- "['Estonia', 'Finland']",4.4944878540495745,1.2753268802230482,4.142857074737549,56,30,41
- "['The Avengers', 'The Justice League']",3.5272328321428605,1.3073785524764345,4.857142925262451,28,39,10
- "['Nicolae Ceaușescu', 'Javier Hernández']",14.816167803611087,2.866841024727357,1.2857142686843872,86,84,83
- "['Adidas', 'Nike']",3.128625488872493,1.1759724560937117,5.0,19,10,3
- "['Java', 'Javascript']",2.201677080836342,1.1638525989865072,3.142857074737549,4,7,63
- "['bourbon', 'Scotch whisky']",3.151192694795426,1.2018479334928887,4.4285712242126465,20,17,30
- "['Alaska', 'Canada']",5.749840363096197,2.051522647924928,4.142857074737549,66,74,41
- "['Spain', 'Italy']",6.114873120356638,1.2304383339125753,4.285714149475098,69,19,35
- "['banana', 'plantain']",3.898063267431944,1.239858409557863,4.714285850524902,42,24,16
- "['Firefox', 'Chrome']",3.5003419555946533,1.2388581155089609,4.857142925262451,27,23,10
- "['Pecorino Romano', 'Parmesan']",3.0545436291513535,1.1561388450582295,4.5714287757873535,18,4,23
- "['Ligue 1', 'Bundesliga']",4.5064748482425,1.23614659258189,5.0,57,21,3
- "['Netflix', 'Amazon Prime Video']",2.3484919883495508,1.1320119721227202,4.5714287757873535,5,2,23
- "['Primark', 'Shein']",4.510338532624208,1.3975065558414137,4.4285712242126465,58,45,30
- "[""Dominos' Pizza"", 'Pizza Hut']",3.769478721730652,1.1959740216706212,4.857142925262451,33,14,10
- "['South Africa', 'Bhagavad Gita']",17.183548050370515,3.0240732935848014,1.0,88,85,90
- "['Germany', 'France']",4.7596090117583785,1.2762333438571687,3.4285714626312256,62,31,59
- "['Kindle', 'Jeff Bezos']",3.876382551537531,1.2458504058572832,1.5714285373687744,39,26,75
- "['Harry Potter', 'Lord of the Rings']",3.8590344817902005,1.5231994501082553,4.142857074737549,38,55,41
- "['Yakutia', 'Turkey']",9.843375743462872,1.9136274793204298,1.8571428060531616,80,68,71
- "['PS5', 'XBox']",2.57469644019121,1.2867747741997058,5.0,7,33,3
- "['Monet', 'Manet']",2.8312851205965224,1.1854059397722165,4.0,14,13,46
- "['Glastonbury', 'Roskilde']",5.702882133219245,2.0309699733249644,4.4285712242126465,65,72,30
- "['Telugu', 'Tamil']",4.131681071938202,1.200778959518929,3.857142925262451,48,16,51
- "['Batman', 'Superman']",3.546625877820798,1.2740821846390384,4.0,29,29,46
- "['cannoli', 'canneloni']",1.5953917799354116,1.3014626400370175,1.8571428060531616,1,35,71
- "['Gerald Ford', 'Duran Duran']",8.6988355575664,2.1149843195939737,1.4285714626312256,78,76,79
experiments/baseline_lm_lc.py CHANGED
@@ -20,16 +20,16 @@ data = load_dataset("cardiffnlp/relentless", split="test")
 full_result = []
 for lm, ppl_class, batch, pretty_name in [
     # ("google/flan-ul2", EncoderDecoderLM, 1, "Flan-UL2"),
-    ("google/flan-t5-xxl", EncoderDecoderLM, 1, "Flan-T5\textsubscript{XXL}"),
-    ("google/flan-t5-xl", EncoderDecoderLM, 1, "Flan-T5\textsubscript{XL}"),
-    ("google/flan-t5-large", EncoderDecoderLM, 32, "Flan-T5\textsubscript{LARGE}"),
-    ("google/flan-t5-base", EncoderDecoderLM, 128, "Flan-T5\textsubscript{BASE}"),
-    ("google/flan-t5-small", EncoderDecoderLM, 256, "Flan-T5\textsubscript{SMALL}"),
-    ("t5-11b", EncoderDecoderLM, 1, "T5\textsubscript{XXL}"),
-    ("t5-3b", EncoderDecoderLM, 1, "T5\textsubscript{XL}"),
-    ("t5-large", EncoderDecoderLM, 32, "T5\textsubscript{LARGE}"),
-    ("t5-base", EncoderDecoderLM, 128, "T5\textsubscript{BASE}"),
-    ("t5-small", EncoderDecoderLM, 256, "T5\textsubscript{SMALL}"),
+    # ("google/flan-t5-xxl", EncoderDecoderLM, 1, "Flan-T5\textsubscript{XXL}"),
+    # ("google/flan-t5-xl", EncoderDecoderLM, 1, "Flan-T5\textsubscript{XL}"),
+    # ("google/flan-t5-large", EncoderDecoderLM, 32, "Flan-T5\textsubscript{LARGE}"),
+    # ("google/flan-t5-base", EncoderDecoderLM, 128, "Flan-T5\textsubscript{BASE}"),
+    # ("google/flan-t5-small", EncoderDecoderLM, 256, "Flan-T5\textsubscript{SMALL}"),
+    # ("t5-11b", EncoderDecoderLM, 1, "T5\textsubscript{XXL}"),
+    # ("t5-3b", EncoderDecoderLM, 1, "T5\textsubscript{XL}"),
+    # ("t5-large", EncoderDecoderLM, 32, "T5\textsubscript{LARGE}"),
+    # ("t5-base", EncoderDecoderLM, 128, "T5\textsubscript{BASE}"),
+    # ("t5-small", EncoderDecoderLM, 256, "T5\textsubscript{SMALL}"),
     # ("facebook/opt-66b", LM, 1, "OPT\textsubscript{66B}"),
     ("facebook/opt-30b", LM, 1, "OPT\textsubscript{30B}"),
     ("facebook/opt-13b", LM, 1, "OPT\textsubscript{13B}"),
@@ -42,7 +42,7 @@ for lm, ppl_class, batch, pretty_name in [
     ("facebook/opt-iml-1.3b", LM, 1, "OPT-IML\textsubscript{1.3B}"),
     ("facebook/opt-iml-max-30b", LM, 1, "OPT-IML\textsubscript{MAX-30B}"),
     ("facebook/opt-iml-max-1.3b", LM, 1, "OPT-IML\textsubscript{MAX-1.3B}"),
-    # ("davinci", OpenAI, None, "GPT-3\textsubscript{davinci}")
+    ("davinci", OpenAI, None, "GPT-3\textsubscript{davinci}")
 ]:
     os.makedirs(f"results/lm_lc/{os.path.basename(lm)}", exist_ok=True)
     scorer = None
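These baselines all reduce to the same operation: render a candidate pair into a list-completion prompt and take the language model's perplexity as an (inverse) plausibility score, via the lmppl classes imported in these scripts. A minimal sketch with the smallest encoder-decoder model from the list above; the prompt text is illustrative:

from lmppl import EncoderDecoderLM

# Encoder-decoder scoring splits each example into an input (the instruction)
# and an output (the candidate pair), returning one perplexity per example.
scorer = EncoderDecoderLM("google/flan-t5-small")
prefix = "Complete the following list with examples of entities that are competitors or rivals"
pairs = [("Apple", "Microsoft"), ("Apple", "Rolex")]
ppl = scorer.get_perplexity(
    input_texts=[prefix] * len(pairs),
    output_texts=[f'["{a}", "{b}"]' for a, b in pairs],
)
print(ppl)  # lower perplexity = the pair reads as a more plausible completion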
experiments/baseline_lm_lc_fewshot.py CHANGED
@@ -10,13 +10,13 @@ OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", None)
 runs = 3
 shots_num = [1, 3]
 prompt_dict = {
-    "is friend/ally of": "entities that are friends or allies",
-    "is competitor/rival of": "entities that are competitors or rivals",
-    "is known for": "examples of what entities are known for",
-    "is influenced by": "what has influenced different entities",
-    "is similar to": "examples of entities that are similar"
+    "friend/ally of": "entities that are friends or allies",
+    "competitor/rival of": "entities that are competitors or rivals",
+    "known for": "examples of what entities are known for",
+    "influenced by": "what has influenced different entities",
+    "similar to": "examples of entities that are similar"
 }
-data = load_dataset("cardiffnlp/relentless_full", split="test")
+data = load_dataset("cardiffnlp/relentless", split="test")
 shots_ref = {}
 for shots in shots_num:
     all_perms = list(permutations(range(5), shots))
@@ -35,10 +35,10 @@ for lm, ppl_class, batch, pretty_name in [
     scorer = None
     for shots in shots_num:
         for s in range(runs):
-            os.makedirs(f"experiments/results/lm_lc_{shots}shots_{s}seed/{os.path.basename(lm)}", exist_ok=True)
+            os.makedirs(f"results/lm_lc_{shots}shots_{s}seed/{os.path.basename(lm)}", exist_ok=True)

             for d in data:
-                ppl_file = f"experiments/results/lm_lc_{shots}shots_{s}seed/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"
+                ppl_file = f"results/lm_lc_{shots}shots_{s}seed/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"

                 if not os.path.exists(ppl_file):

@@ -48,7 +48,7 @@ for lm, ppl_class, batch, pretty_name in [
             else:
                 scorer = ppl_class(lm, device_map='auto', low_cpu_mem_usage=True, offload_folder=f"./offload_folder/{os.path.basename(lm)}")

-            demo = [d['positive_examples'][h] for h in list(shots_ref[shots][s])]
+            demo = [d['prototypical_examples'][h] for h in list(shots_ref[shots][s])]
             # proto = ",".join([f'["{a}", "{b}"]' for a, b in demo])
             content = "\n".join([f'* ["{a}", "{b}"]' for a, b in demo])
             prompt_input = f"{prompt_dict[d['relation_type']]}:\n{content}"
@@ -83,7 +83,7 @@ models = df['model'].unique()
 df = df.pivot(columns="relation_type", index=["model", "shot", "seed"], values="correlation")
 df = df.T[models].T
 df['average'] = df.mean(1)
-df.to_csv(f"experiments/results/lm_lc_fewshots.csv")
+df.to_csv(f"results/lm_lc_fewshots.csv")
 df = (100 * df).round()
 print(df)
 print(df.to_markdown())
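For reference, the few-shot prompt assembled above renders the sampled demonstrations as a bulleted list under the relation description; the pairs below are illustrative stand-ins for d['prototypical_examples']:

# Mirrors the content/prompt_input lines in the diff above (demo pairs made up).
demo = [("Apple", "Microsoft"), ("Nike", "Adidas"), ("BMW", "Mercedes-Benz")]
content = "\n".join([f'* ["{a}", "{b}"]' for a, b in demo])
prompt_input = "entities that are competitors or rivals:\n" + content
print(prompt_input)
# entities that are competitors or rivals:
# * ["Apple", "Microsoft"]
# * ["Nike", "Adidas"]
# * ["BMW", "Mercedes-Benz"]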
experiments/baseline_lm_lc_zeroshot.py CHANGED
@@ -8,11 +8,11 @@ from lmppl import EncoderDecoderLM, LM, OpenAI
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", None)

 prompt_dict = {
-    "is friend/ally of": "entities that are friends or allies",
-    "is competitor/rival of": "entities that are competitors or rivals",
-    "is known for": "examples of what entities are known for",
-    "is influenced by": "what has influenced different entities",
-    "is similar to": "examples of entities that are similar"
+    "friend/ally of": "entities that are friends or allies",
+    "competitor/rival of": "entities that are competitors or rivals",
+    "known for": "examples of what entities are known for",
+    "influenced by": "what has influenced different entities",
+    "similar to": "examples of entities that are similar"
 }
 data = load_dataset("cardiffnlp/relentless_full", split="test")
 full_result = []
@@ -22,10 +22,10 @@ for lm, ppl_class, batch, pretty_name in [
     ("facebook/opt-13b", LM, 1, "OPT\textsubscript{13B}"),
     ("davinci", OpenAI, None, "GPT-3\textsubscript{davinci}")
 ]:
-    os.makedirs(f"experiments/results/lm_lc_zeroshot/{os.path.basename(lm)}", exist_ok=True)
+    os.makedirs(f"results/lm_lc_zeroshot/{os.path.basename(lm)}", exist_ok=True)
     scorer = None
     for d in data:
-        ppl_file = f"experiments/results/lm_lc_zeroshot/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"
+        ppl_file = f"results/lm_lc_zeroshot/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"

         if not os.path.exists(ppl_file):

@@ -35,7 +35,7 @@ for lm, ppl_class, batch, pretty_name in [
         else:
             scorer = ppl_class(lm, device_map='auto', low_cpu_mem_usage=True, offload_folder=f"./offload_folder/{os.path.basename(lm)}")

-        proto = ",".join([f'["{a}", "{b}"]' for a, b in d['positive_examples']])
+        proto = ",".join([f'["{a}", "{b}"]' for a, b in d['prototypical_examples']])
         prefix = f"Complete the following list with examples of {prompt_dict[d['relation_type']]}"
         if ppl_class is LM or ppl_class is OpenAI:
             prompt_input = [f'{prefix}\n * ["{x}", "{y}"]' for x, y in d['pairs']]
@@ -68,7 +68,7 @@ df = df.T[models].T
 df['average'] = df.mean(1)
 df['shot'] = 0
 df['seed'] = 0
-df.to_csv("experiments/results/lm_lc_zeroshot.csv")
+df.to_csv("results/lm_lc_zeroshot.csv")
 df = (100 * df).round()
 print(df.to_markdown())
 print(df.to_latex(escape=False))
experiments/baseline_lm_qa.py CHANGED
@@ -42,7 +42,7 @@ for lm, ppl_class, batch, pretty_name in [
     ("facebook/opt-iml-1.3b", LM, 1, "OPT-IML\textsubscript{1.3B}"),
     ("facebook/opt-iml-max-30b", LM, 1, "OPT-IML\textsubscript{MAX-30B}"),
     ("facebook/opt-iml-max-1.3b", LM, 1, "OPT-IML\textsubscript{MAX-1.3B}"),
-    # ("davinci", OpenAI, None, "GPT-3\textsubscript{davinci}")
+    ("davinci", OpenAI, None, "GPT-3\textsubscript{davinci}")
 ]:
     os.makedirs(f"results/lm_qa/{os.path.basename(lm)}", exist_ok=True)
     scorer = None
experiments/baseline_lm_qa_fewshot.py CHANGED
@@ -10,11 +10,11 @@ OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", None)
 runs = 3
 shots_num = [1, 3]
 prompt_dict = {
-    "is friend/ally of": "entities that are friends or allies",
-    "is competitor/rival of": "entities that are competitors or rivals",
-    "is known for": "examples of what entities are known for",
-    "is influenced by": "what has influenced different entities",
-    "is similar to": "examples of entities that are similar"
+    "friend/ally of": "entities that are friends or allies",
+    "competitor/rival of": "entities that are competitors or rivals",
+    "known for": "examples of what entities are known for",
+    "influenced by": "what has influenced different entities",
+    "similar to": "examples of entities that are similar"
 }
 data = load_dataset("cardiffnlp/relentless_full", split="test")
 shots_ref = {}
@@ -34,9 +34,9 @@ for lm, ppl_class, batch, pretty_name in [
     scorer = None
     for shots in shots_num:
         for s in range(runs):
-            os.makedirs(f"experiments/results/lm_qa_{shots}shots_{s}seed/{os.path.basename(lm)}", exist_ok=True)
+            os.makedirs(f"results/lm_qa_{shots}shots_{s}seed/{os.path.basename(lm)}", exist_ok=True)
             for d in data:
-                ppl_file = f"experiments/results/lm_qa_{shots}shots_{s}seed/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"
+                ppl_file = f"results/lm_qa_{shots}shots_{s}seed/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"

                 if not os.path.exists(ppl_file):
                     if scorer is None:
@@ -44,7 +44,7 @@ for lm, ppl_class, batch, pretty_name in [
                             scorer = ppl_class(OPENAI_API_KEY, model=lm)
                         else:
                             scorer = ppl_class(lm, device_map='auto', low_cpu_mem_usage=True, offload_folder=f"./offload_folder/{os.path.basename(lm)}")
-                    demo = [d['positive_examples'][h] for h in list(shots_ref[shots][s])]
+                    demo = [d['prototypical_examples'][h] for h in list(shots_ref[shots][s])]
                     proto = ",".join([f'["{a}", "{b}"]' for a, b in demo])
                     prefix = f"Answer the question by yes or no. We know that {proto} are examples of {prompt_dict[d['relation_type']]}."
                     if ppl_class is LM or ppl_class is OpenAI:
@@ -76,7 +76,7 @@ models = df['model'].unique()
 df = df.pivot(columns="relation_type", index=["model", "shot", "seed"], values="correlation")
 df = df.T[models].T
 df['average'] = df.mean(1)
-df.to_csv(f"experiments/results/lm_qa_fewshots.csv")
+df.to_csv(f"results/lm_qa_fewshots.csv")
 df = (100 * df).round()
 print(df)
 print(df.to_markdown())
experiments/baseline_lm_qa_zeroshot.py CHANGED
@@ -8,11 +8,11 @@ from lmppl import EncoderDecoderLM, LM, OpenAI
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", None)

 prompt_dict = {
-    "is friend/ally of": "entities that are friends or allies",
-    "is competitor/rival of": "entities that are competitors or rivals",
-    "is known for": "examples of what entities are known for",
-    "is influenced by": "what has influenced different entities",
-    "is similar to": "examples of entities that are similar"
+    "friend/ally of": "entities that are friends or allies",
+    "competitor/rival of": "entities that are competitors or rivals",
+    "known for": "examples of what entities are known for",
+    "influenced by": "what has influenced different entities",
+    "similar to": "examples of entities that are similar"
 }
 data = load_dataset("cardiffnlp/relentless_full", split="test")
 full_result = []
@@ -22,10 +22,10 @@ for lm, ppl_class, batch, pretty_name in [
     ("facebook/opt-13b", LM, 1, "OPT\textsubscript{13B}"),
     ("davinci", OpenAI, None, "GPT-3\textsubscript{davinci}")
 ]:
-    os.makedirs(f"experiments/results/lm_qa_zeroshot/{os.path.basename(lm)}", exist_ok=True)
+    os.makedirs(f"results/lm_qa_zeroshot/{os.path.basename(lm)}", exist_ok=True)
     scorer = None
     for d in data:
-        ppl_file = f"experiments/results/lm_qa_zeroshot/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"
+        ppl_file = f"results/lm_qa_zeroshot/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"

         if not os.path.exists(ppl_file):

@@ -35,7 +35,7 @@ for lm, ppl_class, batch, pretty_name in [
                 else:
                     scorer = ppl_class(lm, device_map='auto', low_cpu_mem_usage=True, offload_folder=f"./offload_folder/{os.path.basename(lm)}")

-            proto = ",".join([f'["{a}", "{b}"]' for a, b in d['positive_examples']])
+            proto = ",".join([f'["{a}", "{b}"]' for a, b in d['prototypical_examples']])
             prefix = f"Answer the question by yes or no."
             # prompt_input = f"{prompt_dict[d['relation_type']]}:\n{content}"
             if ppl_class is LM or ppl_class is OpenAI:
@@ -69,7 +69,7 @@ df = df.T[models].T
 df['average'] = df.mean(1)
 df['shot'] = 0
 df['seed'] = 0
-df.to_csv("experiments/results/lm_qa_zeroshot.csv")
+df.to_csv("results/lm_qa_zeroshot.csv")
 df = (100 * df).round()
 print(df.to_markdown())
 print(df.to_latex(escape=False))
experiments/baseline_validation_lc.py CHANGED
@@ -8,13 +8,13 @@ from lmppl import EncoderDecoderLM, LM, OpenAI
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", None)

 prompt_dict = {
-    "is friend/ally of": "Complete the following list with examples of entities that are friends or allies",
-    "is competitor/rival of": "Complete the following list with examples of entities that are competitors or rivals",
-    "is known for": "Complete the following list with examples of what entities are known for",
-    "is influenced by": "Complete the following list with examples of what has influenced different entities",
-    "is similar to": "Complete the following list with examples of entities that are similar"
+    "friend/ally of": "Complete the following list with examples of entities that are friends or allies",
+    "competitor/rival of": "Complete the following list with examples of entities that are competitors or rivals",
+    "known for": "Complete the following list with examples of what entities are known for",
+    "influenced by": "Complete the following list with examples of what has influenced different entities",
+    "similar to": "Complete the following list with examples of entities that are similar"
 }
-data = load_dataset("cardiffnlp/relentless_full", split="validation")
+data = load_dataset("cardiffnlp/relentless", split="validation")
 full_result = []
 for lm, ppl_class, batch, pretty_name in [
     ("google/flan-t5-xxl", EncoderDecoderLM, 1, "Flan-T5\textsubscript{XXL}"),
@@ -22,10 +22,10 @@ for lm, ppl_class, batch, pretty_name in [
     ("facebook/opt-13b", LM, 1, "OPT\textsubscript{13B}"),
     ("davinci", OpenAI, None, "GPT-3\textsubscript{davinci}")
 ]:
-    os.makedirs(f"experiments/results_validation/lm_lc/{os.path.basename(lm)}", exist_ok=True)
+    os.makedirs(f"results_validation/lm_lc/{os.path.basename(lm)}", exist_ok=True)
     scorer = None
     for d in data:
-        ppl_file = f"experiments/results_validation/lm_lc/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"
+        ppl_file = f"results_validation/lm_lc/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"

         if not os.path.exists(ppl_file):

@@ -35,7 +35,7 @@ for lm, ppl_class, batch, pretty_name in [
                 else:
                     scorer = ppl_class(lm, device_map='auto', low_cpu_mem_usage=True, offload_folder=f"./offload_folder/{os.path.basename(lm)}")

-            content = "\n".join([f'* ["{a}", "{b}"]' for a, b in d['positive_examples']])
+            content = "\n".join([f'* ["{a}", "{b}"]' for a, b in d['prototypical_examples']])
             prompt_input = f"{prompt_dict[d['relation_type']]}:\n{content}"
             if ppl_class is LM:
                 prompt_input = [f'{prompt_input}\n* ["{x}", "{y}"]' for x, y in d['pairs']]
@@ -69,7 +69,7 @@ print(df)
 df = df.pivot(columns="relation_type", index="model", values="correlation")
 df = df.T[models].T
 df['average'] = df.mean(1)
-df.to_csv("experiments/results_validation/lm_lc/lm.csv")
+df.to_csv("results_validation/lm_lc/lm.csv")
 df = (100 * df).round()
 print(df.to_markdown())
 print(df.to_latex(escape=False))
experiments/baseline_validation_qa.py CHANGED
@@ -8,11 +8,11 @@ from lmppl import EncoderDecoderLM, LM, OpenAI
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", None)

 prompt_dict = {
-    "is friend/ally of": "entities that are friends or allies",
-    "is competitor/rival of": "entities that are competitors or rivals",
-    "is known for": "examples of what entities are known for",
-    "is influenced by": "what has influenced different entities",
-    "is similar to": "examples of entities that are similar"
+    "friend/ally of": "entities that are friends or allies",
+    "competitor/rival of": "entities that are competitors or rivals",
+    "known for": "examples of what entities are known for",
+    "influenced by": "what has influenced different entities",
+    "similar to": "examples of entities that are similar"
 }
 data = load_dataset("cardiffnlp/relentless_full", split="validation")
 full_result = []
@@ -22,10 +22,10 @@ for lm, ppl_class, batch, pretty_name in [
     ("facebook/opt-13b", LM, 1, "OPT\textsubscript{13B}"),
     ("davinci", OpenAI, None, "GPT-3\textsubscript{davinci}")
 ]:
-    os.makedirs(f"experiments/results_validation/lm_qa/{os.path.basename(lm)}", exist_ok=True)
+    os.makedirs(f"results_validation/lm_qa/{os.path.basename(lm)}", exist_ok=True)
     scorer = None
     for d in data:
-        ppl_file = f"experiments/results_validation/lm_qa/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"
+        ppl_file = f"results_validation/lm_qa/{os.path.basename(lm)}/ppl.{d['relation_type'].replace(' ', '_').replace('/', '__')}.jsonl"

         if not os.path.exists(ppl_file):

@@ -35,7 +35,7 @@ for lm, ppl_class, batch, pretty_name in [
                 else:
                     scorer = ppl_class(lm, device_map='auto', low_cpu_mem_usage=True, offload_folder=f"./offload_folder/{os.path.basename(lm)}")

-            proto = ",".join([f'["{a}", "{b}"]' for a, b in d['positive_examples']])
+            proto = ",".join([f'["{a}", "{b}"]' for a, b in d['prototypical_examples']])
             prefix = f"Answer the question by yes or no. We know that {proto} are examples of {prompt_dict[d['relation_type']]}."
             if ppl_class is LM or ppl_class is OpenAI:
                 prompt_input = [f'{prefix} Are ["{x}", "{y}"] {prompt_dict[d["relation_type"]]} as well?\n yes' for x, y in d['pairs']]
@@ -66,7 +66,7 @@ models = df['model'].unique()
 df = df.pivot(columns="relation_type", index="model", values="correlation")
 df = df.T[models].T
 df['average'] = df.mean(1)
-df.to_csv("experiments/results_validation/lm_qa/lm.csv")
+df.to_csv("results_validation/lm_qa/lm.csv")
 df = (100 * df).round()
 print(df.to_markdown())
 print(df.to_latex(escape=False))
experiments/results/chat/gpt-4.known_for.json ADDED
@@ -0,0 +1,111 @@
+['Vincent Van Gogh', 'The Starry Night']
+['Alphabet Inc.', 'Google']
+['Bill Nye', 'scientist']
+['Mercedes Benz', 'build quality']
+['Jackson Pollock', 'action painting']
+['Keanu Reeves', 'The Matrix']
+['Picasso', 'Guernica']
+['Italy', 'Renaissance']
+['Spain', 'olive oil']
+['C.S. Lewis', 'The Screwtape Letters']
+['Corsica', 'Napoleon Bonaparte']
+['Amazon', 'Alexa']
+['Thomas Edison', 'light bulb']
+['Alfred Hitchcock', 'horror']
+['William Shakespeare', 'Romeo and Juliet']
+['Boris Johnson', 'Brexit']
+['Georgia', 'Joseph Stalin']
+['OpenAI', 'ChatGPT']
+['Tesco', 'groceries']
+['Amazon', 'cloud computing']
+['Nvidia', 'GPUs']
+['Tony Blair', 'Iraq War']
+['Germany', 'techno music']
+['Italy', 'wine']
+['Coca-Cola', 'Pepsi']
+['Spotify', 'Podcasts']
+['Valencia', 'paella']
+['Romania', 'Roman Catholicism']
+['Switzerland', 'mountains']
+["Assassin's Creed", 'history']
+['Red Bull', 'energy drinks']
+['Johnny Cash', 'Ring of Fire']
+['Microsoft', 'Xbox']
+['Canada', 'maple syrup']
+['France', 'cheese']
+['Nintendo', 'Super Mario Bros.']
+['UK', 'rain']
+['Matt Damon', "Ocean's Eleven"]
+['Switzerland', 'banking secrecy']
+['Adidas', 'Yeezy Boost']
+['Portugal', 'Fado']
+['Beatles', 'Come Together']
+['Afro-Brazilians', 'Capoeira']
+['Meta', 'Instagram']
+['Jeff Goldblum', 'Jurassic Park']
+['France', 'baguette']
+['Queen', 'Bohemian Rhapsody']
+['Greggs', 'sausage rolls']
+['Japan', 'sake']
+['IKEA', 'food']
+['Netherlands', 'tulips']
+['Harvey Weinstein', 'Miramax']
+['Scotland', 'whisky']
+['Apple', 'Apple Watch']
+['Steve Jobs', 'Apple']
+['Beatles', 'I Me Mine']
+['Google', 'search engine']
+['Sweden', 'pop']
+['France', 'beaches']
+['France', 'cars']
+['Italy', 'Hawaiian pizza']
+['France', 'rococo movement']
+['Europe', 'The Final Countdown']
+['Sophie Turner', 'Sylvia Plath']
+['Hawaii', 'ukulele']
+['France', 'mountains']
+['India', 'rum']
+['India', 'Gurkhas']
+['Buffon', 'PSG']
+['Thomas Edison', 'telephone']
+['Michael Jordan', 'Tessa Thompson']
+['Inglourious Basterds', 'Sergio Busquets']
+['Pizzagate', 'Hillary Clinton']
+['George Orwell', 'Coming Up for Air']
+['The Office', 'IBM']
+['George Washington', 'Kiribati']
+['Charles Bronson', 'Rory McIlroy']
+['Belgium', 'wine']
+['Luka Modrić', 'Rottweiler']
+['Gilmore Girls', 'OpenAI']
+['Ragnarök', 'Little Boy']
+['China', 'coffee']
+['France', 'beer']
+['Harry Potter', 'Bloomsbury']
+['Italy', 'tea']
+['Afro-Brazilians', 'Capoeira']
+['Steve Jobs', 'AirPods']
+['Neil Armstrong', 'Korean War']
+['Pixar', 'Novosibirsk']
+['William Grant & Sons', 'gin']
+['LAMY', 'notebook']
+['Italy', 'Renaissance']
+['Korea', 'Breakdance']
+['Michelangelo', 'Pop Art']
+['Johnny Depp', 'Pirates of the Caribbean']
+['Portugal', 'Fado']
+['Adidas', 'Yeezy Boost']
+['Red Bull', 'energy drinks']
+['Frank Abagnale Jr', 'doctor']
+['India', 'chai tea']
+['democracy', 'North Korea']
+['Rafael Nadal', 'Ralph Macchio']
+['Corsica', 'Napoleon Bonaparte']
+['Romania', 'Roman Catholicism']
+['Switzerland', 'mountains']
+['George Orwell', 'Animal Farm']
+['William Shakespeare', 'Macbeth']
+['Albert Einstein', 'theory of relativity']
+['Nicola Tesla', 'radio technology']
+['Johnny Depp', 'Alice in Wonderland']
+['United States', 'hot dogs']
experiments/results/chat/gpt-4.similar_to.json ADDED
@@ -0,0 +1,80 @@
+1. ['Alibaba', 'Amazon']
+2. ['Australia', 'New Zealand']
+3. ['Beatles', 'Rolling Stones']
+4. ['Cerave', 'Nivea']
+5. ['Chess', "Rubik's Cube"]
+6. ['Chris Pine', 'Good Friday']
+7. ['Counter Strike', 'Rainbow Six']
+8. ['England', 'Wales']
+9. ['English', 'Persian']
+10. ['Estonia', 'Finland']
+11. ['Fusilli', 'Rotini']
+12. ['Galaxy', 'iPhone']
+13. ['Gameboy', 'Nintendo']
+14. ['Germany', 'France']
+15. ['Harry Potter', 'Lord of the Rings']
+16. ['Hawaii', 'Guam']
+17. ['Homebase', 'IKEA']
+18. ['Indiana Jones', 'Star Wars']
+19. ['iPod', 'iPad']
+20. ['Java', 'Javascript']
+21. ['Joe Burrow', 'Edward Scissorhands']
+22. ['Kindle', 'Jeff Bezos']
+23. ['Ligue 1', 'Bundesliga']
+24. ['Minnesota', 'Wisconsin']
+25. ['Netflix', 'Amazon Prime Video']
+26. ['Nicolae Ceaușescu', 'Javier Hernández']
+27. ['Olympic Games', 'X Games']
+28. ['Pecorino Romano', 'Parmesan']
+29. ['PS5', 'XBox']
+30. ['Ramen', 'Udon']
+31. ['Red Bull', 'Monster Energy']
+32. ['Slack', 'Microsoft Teams']
+33. ["McDonald's", 'Burger King']
+34. ['South Africa', 'Bhagavad Gita']
+35. ['Spain', 'Italy']
+36. ['Star Wars', 'Star Trek']
+37. ['Sudan', 'South Sudan']
+38. ['Uzbekistan', 'United States']
+39. ['Yugoslavia', 'Albania']
+40. [ 'Avatar', 'Archimedes']
+41. ['Pepsi', 'Fanta']
+42. [ "Dominos' Pizza", 'Pizza Hut']
+43. ['Monet', 'Manet']
+44. ['Alaska', 'Canada']
+45. ['Coca-Cola', 'Pepsi']
+46. ['Eva Braun', 'Phil Jackson']
+47. ['The Avengers', 'The Justice League']
+48. ['Eduardo Saverin', 'Guinea-Bissau']
+49. ['Pill', 'Tablet']
+50. ['Batman', 'Superman']
+51. ['Mehmet Öz', 'David Schwimmer']
+52. ['PS5', 'XBox']
+53. ['Gisele Bündchen', 'Orson Welles']
+54. ['Suits', 'Law & Order']
+55. ['Steve Jobs', 'Tim Cook']
+56. ['Peter Phillips', 'Christine Baranski']
+57. ['New York', 'York']
+58. ['Jenna Ortega', 'Anglo-Saxons']
+59. ['Chess', "Rubik's Cube"]
+60. ['Adidas', 'Nike']
+61. ['Cannoli', 'Canneloni']
+62. ['Coachella', 'Woodstock']
+63. ['Slack', 'Microsoft Teams']
+64. ['Tesco', 'Sainsburys']
+65. ['Banana', 'Plantain']
+66. ['Scrabble', 'Jenga']
+67. ['Cardiff', 'Swansea']
+68. ['Sphinx', 'Sphynx']
+69. ['Great Britian', 'British Empire']
+70. ['Selection Dice', 'Macdonald Empire']
+71. ['Karl Urban', 'France 24']
+72. ['Slack', 'Microsoft Teams']
+73. ['Christmas', 'Easter']
+74. ['Batman', 'Iron Man']
+75. ['Firefox', 'Chrome']
+76. ['Edward I', 'William the Conqueror']
+77. ['Frank Sinatra', 'Ella Fitzgerald']
+78. ['Indiana Jones', 'Star Wars']
+79. ['Primark', 'Shein']
+80. ['Gerald Ford', 'Duran Duran']