import pandas as pd
from transformers import TapasTokenizer

from tapas_visualizer import TapasVisualizer


def main():
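    """Load a TAPAS tokenizer, build a small example table, and print its visualization."""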
    tapas_tokenizer = TapasTokenizer.from_pretrained("google/tapas-base")
    viz = TapasVisualizer(tapas_tokenizer)

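    # Example table; cell values are kept as strings, which is what TapasTokenizer expects.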
    data = {
        "Actors": ["Brad Pitt", "Leonardo Di Caprio", "George Clooney"],
        "Age": ["56", "45", "59"],
        "Number of movies": ["87", "53", "69"],
    }

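    # Build the DataFrame and print the visualizer's output for the table.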
    table = pd.DataFrame.from_dict(data)
    print(viz(table))


if __name__ == "__main__":
    main()