# JertineTech / dolphin-2.9-llama3-8b-q3_K_M.gguf
# Eric Nangomba
# Update dolphin-2.9-llama3-8b-q3_K_M.gguf
# c6ce6cc verified
from __future__ import annotations
from typing import Iterable
import gradio as gr
from gradio.themes.base import Base  # fixed: was "radio.themes.base" (typo, ImportError)
from gradio.themes.utils import colors, fonts, sizes  # fixed: was "radio.themes.utils"
from llama_cpp import Llama
from huggingface_hub import hf_hub_download

# Download the quantized GGUF weights into the working directory
# (hf_hub_download is a no-op if the file is already cached locally).
hf_hub_download(
    repo_id="cognitivecomputations/dolphin-2.9-llama3-8b-gguf",
    filename="dolphin-2.9-llama3-8b-q3_K_M.gguf",
    local_dir=".",
)
# Load the model with llama-cpp-python's default settings (CPU, default context size).
llm = Llama(model_path="./dolphin-2.9-llama3-8b-q3_K_M.gguf")
# ChatML prompt template used by the Dolphin models: a system turn, a user
# turn, and an open assistant turn that the model is asked to complete.
# Placeholders {system} and {question} are filled in by generate().
ins = '''<|im_start|>system
{system}<|im_end|>
<|im_start|>user
{question}<|im_end|>
<|im_start|>assistant
'''
# NOTE(review): this Monochrome theme is never passed to gr.Blocks below —
# the demo uses custom_theme (BlueTheme) instead, so this appears unused.
theme = gr.themes.Monochrome(
    primary_hue="red",
    secondary_hue="orange",
    neutral_hue="neutral",
    radius_size=gr.themes.sizes.radius_sm,
    font=[gr.themes.GoogleFont("Space Grotesk"), "ui-sans-serif", "system-ui", "sans-serif"],
)
def generate(instruction, system_prompt):
    """Run one completion against the local llama.cpp model.

    Fills the module-level ChatML template ``ins`` with the given system
    and user text, stops generation at the next user turn or end-of-turn
    marker, and returns the assistant's reply text.
    """
    filled_prompt = ins.format(question=instruction, system=system_prompt)
    completion = llm(filled_prompt, stop=['<|im_start|>user', '<|im_end|>'])
    return completion['choices'][0]['text']
# Example prompts shown beneath the question box (order is the display order).
examples = [
    "How do horses gallop?",
    "Why is the sky blue?",
    "How do I learn to write python code?",
    "Why do cats wag their tails?"
]
def process_example(args):
    """Answer one example prompt (used as the fn for gr.Examples).

    The original body called ``generate(args)`` without the required
    ``system_prompt`` argument (a guaranteed TypeError) and iterated the
    returned string only to hand back its final character. Fixed: forward
    an empty system prompt and return the complete answer text.
    """
    return generate(args, "")
# Custom CSS: hide any element Gradio tags with the ".generating" class.
css = ".generating {visibility: hidden}"
class BlueTheme(Base):
    """Gradio theme based on blue/cyan hues with gradient primary buttons.

    Defaults mirror Gradio's stock theme parameters; after base
    initialization a fixed set of style overrides (gradient button fills,
    drop shadows, input styling) is layered on top.
    """

    def __init__(
        self,
        *,
        primary_hue: colors.Color | str = colors.blue,
        secondary_hue: colors.Color | str = colors.cyan,
        neutral_hue: colors.Color | str = colors.neutral,
        spacing_size: sizes.Size | str = sizes.spacing_md,
        radius_size: sizes.Size | str = sizes.radius_md,
        font: fonts.Font | str | Iterable[fonts.Font | str] = (
            fonts.GoogleFont("Inter"),
            "ui-sans-serif",
            "sans-serif",
        ),
        font_mono: fonts.Font | str | Iterable[fonts.Font | str] = (
            fonts.GoogleFont("Space Grotesk"),
            "ui-monospace",
            "monospace",
        ),
    ):
        # Let the base theme wire up palette, spacing, radius and fonts first.
        super().__init__(
            primary_hue=primary_hue,
            secondary_hue=secondary_hue,
            neutral_hue=neutral_hue,
            spacing_size=spacing_size,
            radius_size=radius_size,
            font=font,
            font_mono=font_mono,
        )
        # Then apply the look-and-feel overrides on top of the base values.
        overrides = dict(
            button_primary_background_fill="linear-gradient(90deg, *primary_300, *secondary_400)",
            button_primary_background_fill_hover="linear-gradient(90deg, *primary_200, *secondary_300)",
            button_primary_text_color="white",
            button_primary_background_fill_dark="linear-gradient(90deg, *primary_600, *secondary_800)",
            block_shadow="*shadow_drop_lg",
            button_shadow="*shadow_drop_lg",
            input_background_fill="zinc",
            input_border_color="*secondary_300",
            input_shadow="*shadow_drop",
            input_shadow_focus="*shadow_drop_lg",
        )
        self.set(**overrides)
# Instantiate the custom theme used by the Blocks UI below.
custom_theme = BlueTheme()

# Build the UI: a question box + system-prompt box on the left, a Markdown
# answer panel, example prompts, and a Generate button. Both the button
# click and pressing Enter in the question box invoke generate().
with gr.Blocks(theme=custom_theme, analytics_enabled=False, css=css) as demo:
    with gr.Column():
        gr.Markdown(
            """ # 🦙 Dolphin4ALL
llama3 8b (q3_k_m)
Type in the box below and click the button to generate answers to your most pressing questions!
""")
        with gr.Row():
            with gr.Column(scale=3):
                instruction = gr.Textbox(placeholder="Enter your question here", label="Question Prompts")
                sys_prompt = gr.Textbox(placeholder="Enter your system instructions here", label="System Prompts")
                # Answer panel; elem_id lets the css above target it if needed.
                with gr.Box():
                    gr.Markdown("**Answer**")
                    output = gr.Markdown(elem_id="q-output")
                submit = gr.Button("Generate", variant="primary")
                # Clickable example prompts; not cached, so fn runs on click.
                gr.Examples(
                    examples=examples,
                    inputs=[instruction],
                    cache_examples=False,
                    fn=process_example,
                    outputs=[output],
                )
    # Wire both the button and Enter-in-textbox to the same handler.
    submit.click(generate, inputs=[instruction, sys_prompt], outputs=[output])
    instruction.submit(generate, inputs=[instruction, sys_prompt], outputs=[output])

# Serialize requests (single llama.cpp instance) and start the server.
# NOTE(review): queue(concurrency_count=...) and gr.Box are older-Gradio APIs —
# confirm the pinned gradio version supports them.
demo.queue(concurrency_count=1).launch(debug=True)