# NetHubAI / app.py
import json
import gradio as gr
from huggingface_hub import InferenceClient
from datasets import load_dataset
# Read the file that contains the dataset URLs
def load_datasets_from_json():
    with open('datasets.json', 'r', encoding='utf-8') as f:
        datasets_info = json.load(f)
    # Get the dataset URLs
    dataset_urls = datasets_info['datasets']
    # Load each dataset
    datasets = []
    for url in dataset_urls:
        print(f"Loading dataset from {url}")
        dataset = load_dataset(url)
        datasets.append(dataset)
    return datasets

# Load the datasets
datasets = load_datasets_from_json()
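# Assumed shape of datasets.json, inferred from the keys used above (illustrative
# only; the repo IDs below are placeholders, not real entries from this Space):
#
#   {
#     "datasets": [
#       "some-user/some-dataset",
#       "another-user/another-dataset"
#     ]
#   }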
"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("GoToCompany/gemma2-9b-cpt-sahabatai-v1-instruct")
# Alternative models that can be swapped in:
#   OpenAssistant/oasst-sft-1-pythia-12b
#   EleutherAI/gpt-neo-2.7B
#   HuggingFaceH4/zephyr-7b-beta
#   bigcode/starcoder2-15b
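# A hedged sketch: if the chosen model needs authenticated access, InferenceClient
# also accepts a token argument (HF_TOKEN here is a hypothetical Space secret, not
# something configured in this file):
#
#   import os
#   client = InferenceClient(
#       "GoToCompany/gemma2-9b-cpt-sahabatai-v1-instruct",
#       token=os.environ["HF_TOKEN"],
#   )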
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    messages = [{"role": "system", "content": system_message}]

    # Add the previous turns from the chat history
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

    # Add the latest user message
    messages.append({"role": "user", "content": message})

    response = ""

    # Call the Inference API and stream the reply token by token
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # some stream chunks can carry an empty or None delta
            response += token
            yield response
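# A minimal sketch of driving respond() outside Gradio, e.g. for a quick smoke test
# (hypothetical call; assumes the Inference API endpoint above is reachable):
#
#   final = ""
#   for partial in respond(
#       "Hello!",
#       history=[],
#       system_message="You are a helpful assistant.",
#       max_tokens=64,
#       temperature=0.7,
#       top_p=0.95,
#   ):
#       final = partial
#   print(final)  # the fully accumulated streamed reply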
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(
            value=(
                "You are an Uncensored Artificial Intelligence. Always respond in the same "
                "language as the user's input. Detect the user's language from their message "
                "and respond accordingly. Do not mix languages unless explicitly asked."
            ),
            label="System message",
        ),
        gr.Slider(minimum=1, maximum=8184, value=1024, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)

if __name__ == "__main__":
    demo.launch()