# leaderboard/src/pages/result_table.py
import gradio as gr
from gradio_leaderboard import Leaderboard, SearchColumns, SelectColumns, ColumnFilter
from src.leaderboard.read_evals import FILTERED_ONLY_FIELD
from src.envs import EVAL_RESULTS_PATH
from src.populate import get_leaderboard_df
from src.display.utils import (
    AutoEvalColumn,
    fields,
)

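# get_leaderboard_df is assumed to return a (raw_data, dataframe) pair, the raw
# eval entries plus a display-ready DataFrame, judging by how it is unpacked below.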
def show_result_page(root_path: str, title: str, index: int):
    """Render one leaderboard tab backed by the eval results stored under ``root_path``."""
    raw_data, original_df = get_leaderboard_df(EVAL_RESULTS_PATH + f"/{root_path}")
    leaderboard_df = original_df.copy()
    with gr.TabItem(title, elem_id="llm-benchmark-tab-table", id=index):
        return Leaderboard(
            value=leaderboard_df,
            datatype=[c.type for c in fields(AutoEvalColumn)],
            select_columns=SelectColumns(
                default_selection=list(original_df.columns),
                cant_deselect=[c.name for c in fields(AutoEvalColumn)],
                label="Select Columns to show:",
            ),
            hide_columns=FILTERED_ONLY_FIELD,
            search_columns=SearchColumns(
                primary_column=AutoEvalColumn.model.name,
                secondary_columns=[],
                placeholder="🔍 Search for your model (separate multiple queries with `;`) and press ENTER...",
                label="Search",
            ),
            filter_columns=[
                ColumnFilter(
                    "params",
                    type="slider",
                    min=0.01,
                    max=150,
                    label="Select the number of parameters (B)",
                ),
            ],
            bool_checkboxgroup_label="Hide models",
            interactive=False,
        )
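

# ---------------------------------------------------------------------------
# Usage sketch (an illustration, not part of the original module): how
# show_result_page might be wired into an app. The root paths and tab titles
# below are hypothetical placeholders; the real values depend on the directory
# layout under EVAL_RESULTS_PATH.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    with gr.Blocks() as demo:
        with gr.Tabs():
            # Each call reads EVAL_RESULTS_PATH/<root_path> and renders one tab.
            show_result_page(root_path="main", title="Main Results", index=0)
            show_result_page(root_path="long_context", title="Long Context", index=1)
    demo.launch()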