import io
import json
import os

import gradio as gr
import markdown
import pandas as pd
from gchar.games.dispatch.access import get_character
from gchar.resources.pixiv import get_pixiv_keywords, get_pixiv_posts
from gchar.resources.sites import list_available_sites, get_site_tag
from huggingface_hub import hf_hub_url

from character import get_ch_name
from huggingface import get_hf_fs

hf_fs = get_hf_fs()
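# NOTE: get_hf_fs() comes from the local `huggingface` helper module; given the
# fsspec-style paths used below ('datasets/<repo>/<file>'), it presumably
# returns a huggingface_hub.HfFileSystem instance.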


def query(chr_name):
    """Look up a character by name or alias and return its info table,
    skin image URLs, model/dataset links (as HTML), and site tag table."""
    ch = get_character(chr_name, allow_fuzzy=True)

    # get character info
    info_columns = ['Property', 'Value']
    info_data = [('Index', ch.index)]
    ennames = [str(enname) for enname in ch.ennames]
    if ennames:
        info_data.append(('EN Name', ', '.join(ennames)))
    cnnames = [str(cnname) for cnname in ch.cnnames]
    if cnnames:
        info_data.append(('CN Name', ', '.join(cnnames)))
    jpnames = [str(jpname) for jpname in ch.jpnames]
    if jpnames:
        info_data.append(('JP Name', ', '.join(jpnames)))
    if hasattr(ch, 'krnames'):  # only some games provide Korean names
        krnames = [str(krname) for krname in ch.krnames]
        if krnames:
            info_data.append(('KR Name', ', '.join(krnames)))
    info_data.append(('Sex', ch.gender.name))
    info_data.append(('Source', ch.__official_name__))
    info_df = pd.DataFrame(columns=info_columns, data=info_data)

    # get skins: the deepghs/game_character_skins dataset stores one directory
    # per character, with a .meta.json listing the image files inside
    skin_dir = f'datasets/deepghs/game_character_skins/{ch.__game_name__}/{ch.index}'
    meta_json = f'{skin_dir}/.meta.json'
    skin_urls = []
    if hf_fs.exists(meta_json):
        meta = json.loads(hf_fs.read_text(meta_json))
        for item in meta['files']:
            skin_urls.append(hf_hub_url(
                'deepghs/game_character_skins',
                filename=f'{ch.__game_name__}/{ch.index}/{item["name"]}',
                repo_type='dataset',
            ))
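    # Each URL from hf_hub_url() points at the raw file on the Hub
    # ('.../resolve/main/...'), which gr.Gallery can load directly as an image.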

    # get repo info: check whether the corresponding CyberHarem model and
    # dataset repos exist, and render markdown links for them
    repo = f'CyberHarem/{get_ch_name(ch)}'
    with io.StringIO() as sf:
        if hf_fs.exists(f'{repo}/meta.json'):
            model_url = f'https://huggingface.co/{repo}'
            print(f'Model: [{model_url}]({model_url})', file=sf)
        else:
            print('Model not found.', file=sf)
        print(file=sf)

        if hf_fs.exists(f'datasets/{repo}/dataset-raw.zip'):
            ds_url = f'https://huggingface.co/datasets/{repo}'
            print(f'Dataset: [{ds_url}]({ds_url})', file=sf)
        else:
            print('Dataset not found.', file=sf)
        print(file=sf)

        html = markdown.markdown(sf.getvalue())

    # get tags on all sites
    tags_columns = ['Site', 'Posts', 'Tag']
    pixiv_posts = get_pixiv_posts(ch)  # query once; [0] is the ALL count, [1] the R-18 count
    tags_data = [
        ('Pixiv (ALL)', pixiv_posts[0], get_pixiv_keywords(ch)),
        ('Pixiv (R18)', pixiv_posts[1], get_pixiv_keywords(ch, includes=['R-18'])),
    ]
    for site in list_available_sites():
        tag_retval = get_site_tag(ch, site, with_posts=True, sure_only=True)
        if tag_retval is not None:
            tag_name, tag_cnt = tag_retval
            tags_data.append((site, tag_cnt, tag_name))
    # sort by post count (descending), then by site name
    tags_data = sorted(tags_data, key=lambda x: (-x[1], x[0]))
    tags_df = pd.DataFrame(columns=tags_columns, data=tags_data)

    return info_df, skin_urls, html, tags_df
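
# Example of calling query() directly ('amiya' is just an illustrative alias;
# any name that gchar's fuzzy matcher can resolve works):
#   info_df, skin_urls, html, tags_df = query('amiya')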


if __name__ == '__main__':
    with gr.Blocks() as demo:
        gr_input = gr.Textbox(
            label='Character Name',
            placeholder='Enter name or alias of the character.',
        )
        gr_submit = gr.Button(value='Find My Waifu', variant='primary')

        with gr.Row():
            with gr.Column():
                with gr.Row():
                    gr_info = gr.DataFrame(label='Character Info')
                with gr.Row():
                    gr_skins = gr.Gallery(label='Skins')
            with gr.Column():
                with gr.Row():
                    gr_html = gr.HTML(label='Entry of Model and Dataset', value='(N/A)')
                with gr.Row():
                    gr_tags = gr.DataFrame(label='Character Tags')

        gr_submit.click(
            query,
            inputs=[
                gr_input,
            ],
            outputs=[
                gr_info,
                gr_skins,
                gr_html,
                gr_tags,
            ],
        )

    demo.queue(os.cpu_count()).launch()