from __future__ import annotations

import os
import time
from typing import Iterable

import gradio as gr
import pynvml
from ctransformers import AutoModelForCausalLM
from gradio.themes.base import Base
from gradio.themes.utils import colors, fonts, sizes
from huggingface_hub import hf_hub_download, hf_hub_url
from loguru import logger
from python_run_cmd import run_cmd

os.environ["TZ"] = "Asia/Shanghai"
try:
    time.tzset()
    logger.debug(f"Timezone set to {os.environ['TZ']=}")
except AttributeError:
    # time.tzset() does not exist on Windows; keep the default timezone there.
    ...

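# --- Model download ---
# Grab the GGUF quant from the Hugging Face Hub first; if that keeps
# failing, fall back to a direct aria2c download further below.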
repo_id = "TheBloke/openbuddy-mistral-7B-v13-GGUF"
# filename = "openbuddy-mistral-7b-v13.Q4_K_S.gguf"  # smaller alternative quant
filename = "openbuddy-mistral-7b-v13.Q4_K_M.gguf"

model_ready = True
logger.debug("Start dl")

for idx in range(5):
    logger.debug(f"attempt {idx + 1}")
    try:
        model_path = hf_hub_download(
            repo_id=repo_id, filename=filename, revision="main"
        )
        break
    except Exception as exc:
        logger.error(f"failed to download {filename}: {exc}")
        time.sleep(3)
else:
    # Do not raise here: the aria2c fallback below still gets a chance.
    logger.warning("Tried 5 times to no avail")
    model_ready = False

logger.debug(f"Done dl, {model_ready=}")

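# Fallback path: resolve the direct download URL and fetch it with aria2c.
# This assumes a Debian-style image where `apt` is available (e.g. a
# Hugging Face Space container); elsewhere, install aria2 some other way.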
if not model_ready:
    logger.debug("Try aria2c...")
    url = hf_hub_url(repo_id, filename)
    logger.debug(f"{url=}")
    ret = run_cmd("apt update && apt install aria2 -y", mute_stdout=False)
    logger.debug(ret)

    ret1 = run_cmd(f"aria2c -c -o {filename} {url}", mute_stdout=False)
    logger.debug(ret1)
    model_path = f"./{filename}"

    # Both commands must succeed (returncode 0) for the file to be usable.
    if not ret.returncode and not ret1.returncode:
        model_ready = True

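# --- GPU detection ---
# nvmlInit() succeeds only when the NVIDIA driver and library are present.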
has_cuda = False
try:
    pynvml.nvmlInit()
    has_cuda = True
    logger.debug("has cuda")
except pynvml.NVMLError:
    # Base NVML error: covers NVMLError_LibraryNotFound as well as
    # e.g. a present library with no usable driver.
    logger.debug("no cuda")

# Offload layers to the GPU only when one is available.
gpu_layers = 50 if has_cuda else 0

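# Load the GGUF file with ctransformers; gpu_layers=0 keeps inference on the CPU.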
logger.debug("Start loading the model") |
|
try: |
|
model = AutoModelForCausalLM.from_pretrained( |
|
model_path, model_type="mistral", gpu_layers=gpu_layers |
|
) |
|
except Exception as exc: |
|
logger.error(exc) |
|
model_ready = False |
|
model = None |
|
|
|
logger.debug(f"Done loading the model, {model_ready=}") |
|
|
|
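# Prompt template: a llama-2-style [INST] wrapper with the system block
# renamed to a custom <<FRIDAY>> tag; .format() injects the user question.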
ins = """[INST] <<FRIDAY>> |
|
Remember that your English name is "openbuddy" and your name in Chinese is "开友". You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. |
|
If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information. |
|
<</FRIDAY>> |
|
{} [/INST] |
|
""" |
|
|
|
# Note: this Monochrome theme is defined but unused; the Blocks UI below
# uses the SeafoamCustom theme instead.
theme = gr.themes.Monochrome(
    primary_hue="indigo",
    secondary_hue="blue",
    neutral_hue="slate",
    radius_size=gr.themes.sizes.radius_sm,
    font=[
        gr.themes.GoogleFont("Open Sans"),
        "ui-sans-serif",
        "system-ui",
        "sans-serif",
    ],
)

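# Event handler: yields a single complete answer (no token streaming).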
def response(question):
    if model is None:
        res = f"model not ready (got a problem downloading {filename=} from hf.co)"
    else:
        res = model(ins.format(question))
    yield res

examples = ["Hello!"]

def process_example(args):
    # Drain the response generator and return its final value (for Examples).
    x = None
    for x in response(args):
        pass
    return x

css = ".generating {visibility: hidden}"

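# Custom theme: a gradio Base subclass with Quicksand / IBM Plex Mono fonts
# and gradient primary buttons.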
class SeafoamCustom(Base):
    def __init__(
        self,
        *,
        primary_hue: colors.Color | str = colors.emerald,
        secondary_hue: colors.Color | str = colors.blue,
        neutral_hue: colors.Color | str = colors.blue,
        spacing_size: sizes.Size | str = sizes.spacing_md,
        radius_size: sizes.Size | str = sizes.radius_md,
        font: fonts.Font | str | Iterable[fonts.Font | str] = (
            fonts.GoogleFont("Quicksand"),
            "ui-sans-serif",
            "sans-serif",
        ),
        font_mono: fonts.Font | str | Iterable[fonts.Font | str] = (
            fonts.GoogleFont("IBM Plex Mono"),
            "ui-monospace",
            "monospace",
        ),
    ):
        """Init."""
        super().__init__(
            primary_hue=primary_hue,
            secondary_hue=secondary_hue,
            neutral_hue=neutral_hue,
            spacing_size=spacing_size,
            radius_size=radius_size,
            font=font,
            font_mono=font_mono,
        )
        super().set(
            button_primary_background_fill="linear-gradient(90deg, *primary_300, *secondary_400)",
            button_primary_background_fill_hover="linear-gradient(90deg, *primary_200, *secondary_300)",
            button_primary_text_color="white",
            button_primary_background_fill_dark="linear-gradient(90deg, *primary_600, *secondary_800)",
            block_shadow="*shadow_drop_lg",
            button_shadow="*shadow_drop_lg",
            input_background_fill="zinc",
            input_border_color="*secondary_300",
            input_shadow="*shadow_drop",
            input_shadow_focus="*shadow_drop_lg",
        )

seafoam = SeafoamCustom()

with gr.Blocks(theme=seafoam, analytics_enabled=False, css=css) as demo:
    with gr.Column():
        gr.Markdown(
            """ ## Testrun

Type in the box below and click the button to generate answers to your most pressing questions!
"""
        )

        with gr.Row():
            with gr.Column(scale=3):
                instruction = gr.Textbox(
                    placeholder="Enter your question here",
                    label="Question",
                    elem_id="q-input",
                )

                with gr.Box():
                    gr.Markdown("**Answer**")
                    output = gr.Markdown(elem_id="q-output")
                submit = gr.Button("Generate", variant="primary")
                gr.Examples(
                    examples=examples,
                    inputs=[instruction],
                    cache_examples=False,
                    fn=process_example,
                    outputs=[output],
                )

    submit.click(response, inputs=[instruction], outputs=[output])
    instruction.submit(response, inputs=[instruction], outputs=[output])

demo.queue(concurrency_count=1, max_size=5).launch(debug=False, share=True)
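# Note: gr.Box and queue(concurrency_count=...) follow the Gradio 3.x API;
# Gradio 4 removed both, so pin gradio<4 if reusing this script.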