import io
import json
import os

import gradio as gr
import markdown
import pandas as pd
from gchar.games.dispatch.access import get_character
from gchar.generic import import_generic
from gchar.resources.pixiv import get_pixiv_keywords, get_pixiv_posts
from gchar.resources.sites import list_available_sites, get_site_tag
from gchar.utils import get_requests_session
from huggingface_hub import hf_hub_url, configure_http_backend
from pycivitai import civitai_find_online
from pycivitai.client import ModelNotFound

from character import get_ch_name
from civitai import try_find_title
from huggingface import get_hf_fs

import_generic()
hf_fs = get_hf_fs()
configure_http_backend(get_requests_session)
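

# query() gathers everything shown in the UI for one character: basic info,
# skin previews from the skin dataset repository, links to the trained
# model/dataset repositories, and per-site tag statistics.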
def query(chr_name):
    ch = get_character(chr_name, allow_fuzzy=True)

    # get character info
    info_columns = ['Property', 'Value']
    info_data = []
    info_data.append(('Index', ch.index))
    ennames = [str(enname) for enname in ch.ennames]
    if ennames:
        info_data.append(('EN Name', ', '.join(ennames)))
    cnnames = [str(cnname) for cnname in ch.cnnames]
    if cnnames:
        info_data.append(('CN Name', ', '.join(cnnames)))
    jpnames = [str(jpname) for jpname in ch.jpnames]
    if jpnames:
        info_data.append(('JP Name', ', '.join(jpnames)))
    if hasattr(ch, 'krnames'):
        krnames = [str(krname) for krname in ch.krnames]
        if krnames:
            info_data.append(('KR Name', ', '.join(krnames)))
    info_data.append(('Sex', ch.gender.name))
    info_data.append(('Source', ch.__official_name__))
    info_df = pd.DataFrame(columns=info_columns, data=info_data)
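
    # Each character directory in the skin dataset repo is expected to carry a
    # .meta.json listing its image files and their display names; hf_hub_url()
    # turns each entry into a direct download URL that the Gallery can display.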
    # get skins
    skin_dir = f'datasets/{ch.__skin_repository__}/{ch.__game_name__}/{ch.index}'
    meta_json = f'{skin_dir}/.meta.json'
    skin_urls = []
    if hf_fs.exists(meta_json):
        meta = json.loads(hf_fs.read_text(meta_json))
        for item in meta['files']:
            skin_url = hf_hub_url(
                ch.__skin_repository__,
                filename=f'{ch.__game_name__}/{ch.index}/{item["name"]}',
                repo_type='dataset',
            )
            skin_name = item['metadata']['name']
            skin_urls.append((skin_url, skin_name))
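
    # Model and dataset repos are assumed to live under the AppleHarem
    # namespace, named by get_ch_name(); the markdown built here is rendered
    # as HTML in the UI, with "not found" notes when a repo is missing.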
    # get repo info
    repo = f'AppleHarem/{get_ch_name(ch)}'
    with io.StringIO() as sf:
        if hf_fs.exists(f'{repo}/meta.json'):
            model_url = f'https://huggingface.co/{repo}'
            print(f'Model: [{model_url}]({model_url})', file=sf)
        else:
            print('Model not found.', file=sf)
        print(file=sf)

        if hf_fs.exists(f'datasets/{repo}/dataset-raw.zip'):
            ds_url = f'https://huggingface.co/datasets/{repo}'
            print(f'Dataset: [{ds_url}]({ds_url})', file=sf)
        else:
            print('Dataset not found.', file=sf)
        print(file=sf)

        try:
            model_name = try_find_title(str(ch.enname), ch.__game_name__)
            resource = civitai_find_online(model_name)
            civit_url = f'https://civitai.com/models/{resource.model_id}'
            print(f'CivitAI Model: [{civit_url}]({civit_url})', file=sf)
        except ModelNotFound:
            print('No CivitAI published model found.', file=sf)
        print(file=sf)

        html = markdown.markdown(sf.getvalue())
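
    # Tag statistics: overall and R-18 Pixiv post counts, plus the
    # site-specific tag and post count for every other supported site,
    # sorted by post count in descending order.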
    # get tags on all sites
    tags_columns = ['Site', 'Posts', 'Tag']
    tags_data = []
    pixiv_posts = get_pixiv_posts(ch)  # (all_count, r18_count), fetched once instead of twice
    tags_data.append(('Pixiv (ALL)', pixiv_posts[0], get_pixiv_keywords(ch)))
    tags_data.append(('Pixiv (R18)', pixiv_posts[1], get_pixiv_keywords(ch, includes=['R-18'])))
    for site in list_available_sites():
        tag_retval = get_site_tag(ch, site, with_posts=True, sure_only=True)
        if tag_retval is not None:
            tag_name, tag_cnt = tag_retval
            tags_data.append((site, tag_cnt, tag_name))
    tags_data = sorted(tags_data, key=lambda x: (-x[1], x[0]))
    tags_df = pd.DataFrame(columns=tags_columns, data=tags_data)

    return info_df, skin_urls, html, tags_df
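

# Gradio UI: a single text input feeds query(); results are shown as two
# dataframes, a skin gallery, and an HTML block with repository links.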
if __name__ == '__main__':
    with gr.Blocks() as demo:
        gr_input = gr.Textbox(
            label='Character Name',
            placeholder='Enter name or alias of the character.'
        )
        gr_submit = gr.Button(value='Find My Waifu', variant='primary')

        with gr.Row():
            with gr.Column():
                with gr.Row():
                    gr_info = gr.DataFrame(label='Character Info')
                with gr.Row():
                    gr_skins = gr.Gallery(label='Skins')
            with gr.Column():
                with gr.Row():
                    gr_html = gr.HTML(label='Entry of Model and Dataset', value='(N/A)')
                with gr.Row():
                    gr_tags = gr.DataFrame(label='Character Tags')

        gr_submit.click(
            query,
            inputs=[
                gr_input,
            ],
            outputs=[
                gr_info,
                gr_skins,
                gr_html,
                gr_tags,
            ]
        )

    demo.queue(os.cpu_count()).launch()