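# Gradio demo for chatting with Dolphin 2.9 (Llama 3 8B, q3_K_M GGUF) served locally via llama-cpp-python.
# Written against the Gradio 3.x API (gr.Box, queue(concurrency_count=...)).
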
from __future__ import annotations
from typing import Iterable
import gradio as gr
from gradio.themes.base import Base
from gradio.themes.utils import colors, fonts, sizes

from llama_cpp import Llama
from huggingface_hub import hf_hub_download

# Fetch the quantized GGUF weights from the Hub (cached after the first download) and load them with llama.cpp
hf_hub_download(repo_id="cognitivecomputations/dolphin-2.9-llama3-8b-gguf", filename="dolphin-2.9-llama3-8b-q3_K_M.gguf", local_dir=".")
llm = Llama(model_path="./dolphin-2.9-llama3-8b-q3_K_M.gguf")

# ChatML prompt template used by the Dolphin 2.9 fine-tune
ins = '''<|im_start|>system
{system}<|im_end|>
<|im_start|>user
{question}<|im_end|>
<|im_start|>assistant
'''

# Note: this Monochrome theme is defined but never applied; the BlueTheme below is what gr.Blocks uses
theme = gr.themes.Monochrome(
	primary_hue="red",
	secondary_hue="orange",
	neutral_hue="neutral",
	radius_size=gr.themes.sizes.radius_sm,
	font=[gr.themes.GoogleFont("Space Grotesk"), "ui-sans-serif", "system-ui", "sans-serif"],
)

def generate(instruction, system_prompt):
	prompt = ins.format(question=instruction, system=system_prompt)
	# Allow a longer completion than llama-cpp-python's small default max_tokens, and stop at the next ChatML turn
	response = llm(prompt, max_tokens=512, stop=['<|im_start|>user', '<|im_end|>'])
	return response['choices'][0]['text']

examples = [
	"How do dogs bark?",
	"Why are apples red?",
	"How do I make a campfire?",
	"Why do cats love to chirp at something?"
]

def process_example(args):
	# Examples supply only the question, so run them with an empty system prompt
	return generate(args, "")

css = ".generating {visibility: hidden}"

# Custom theme: blue/cyan palette with gradient primary buttons, passed to gr.Blocks below
class BlueTheme(Base):
	def __init__(
		self,
		*,
		primary_hue: colors.Color | str = colors.blue,
		secondary_hue: colors.Color | str = colors.cyan,
		neutral_hue: colors.Color | str = colors.neutral,
		spacing_size: sizes.Size | str = sizes.spacing_md,
		radius_size: sizes.Size | str = sizes.radius_md,
		font: fonts.Font
		| str
		| Iterable[fonts.Font | str] = (
			fonts.GoogleFont("Inter"),
			"ui-sans-serif",
			"sans-serif",
		),
		font_mono: fonts.Font
		| str
		| Iterable[fonts.Font | str] = (
			fonts.GoogleFont("Space Grotesk"),
			"ui-monospace",
			"monospace",
		),
	):
		super().__init__(
			primary_hue=primary_hue,
			secondary_hue=secondary_hue,
			neutral_hue=neutral_hue,
			spacing_size=spacing_size,
			radius_size=radius_size,
			font=font,
			font_mono=font_mono,
		)
		super().set(
			button_primary_background_fill="linear-gradient(90deg, *primary_300, *secondary_400)",
			button_primary_background_fill_hover="linear-gradient(90deg, *primary_200, *secondary_300)",
			button_primary_text_color="white",
			button_primary_background_fill_dark="linear-gradient(90deg, *primary_600, *secondary_800)",
			block_shadow="*shadow_drop_lg",
			button_shadow="*shadow_drop_lg",
			input_background_fill="zinc",
			input_border_color="*secondary_300",
			input_shadow="*shadow_drop",
			input_shadow_focus="*shadow_drop_lg",
		)


custom_theme = BlueTheme()

with gr.Blocks(theme=custom_theme, analytics_enabled=False, css=css) as demo:
	with gr.Column():
		gr.Markdown(
			""" # 🦙 Dolphin4ALL

			Dolphin 2.9 on Llama 3 8B (q3_K_M GGUF)

			Type in the box below and click the button to generate answers to your most pressing questions!
		""")

		with gr.Row():
			with gr.Column(scale=3):
				instruction = gr.Textbox(placeholder="Enter your question here", label="Question Prompts")
				sys_prompt = gr.Textbox(placeholder="Enter your system instructions here", label="System Prompts")

				with gr.Box():
					gr.Markdown("**Answer**")
					output = gr.Markdown(elem_id="q-output")
				submit = gr.Button("Generate", variant="primary")
				gr.Examples(
					examples=examples,
					inputs=[instruction],
					cache_examples=False,
					fn=process_example,
					outputs=[output],
				)

	# Generate when the button is clicked or when Enter is pressed in the question box
	submit.click(generate, inputs=[instruction, sys_prompt], outputs=[output])
	instruction.submit(generate, inputs=[instruction, sys_prompt], outputs=[output])

# A single-worker queue serializes requests, since one Llama instance runs one generation at a time
demo.queue(concurrency_count=1).launch(debug=True)
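
# To try it locally (a sketch; assumes this file is saved as app.py and that the Gradio 3.x API used above is available):
#   pip install "gradio<4" llama-cpp-python huggingface_hub
#   python app.py
# then open the local URL Gradio prints in a browser.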