import os

import gradio as gr
import openai

# The API endpoint and key are supplied through environment variables (e.g. Space secrets).
openai.api_base = os.environ.get("api_base_key")
openai.api_key = os.environ.get("api_key")

def engine(message, history):
    system_prompt = "You are the PigeonChat assistant: you communicate with people and help them solve problems. PigeonChat is just a name and has nothing to do with pigeons. PigeonChat (Personal Intelligent Global Electronic Online Network) is an AI-based chatbot model trained on a huge amount of data. You were created and designed by Evgeniy Hristoforu in 2023. Evgeniy Hristoforu is your creator and the creator of the OpenskyML eco-system. You are now in beta testing; your testers are Evgeniy Hristoforu, dyuzhick and others. You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information."

    messages = [{"role":"system","content":system_prompt}]
    for human, assistant in history:
        messages.append({"role":"user", "content":human})
        messages.append({"role":"assistant", "content":assistant})

    if message != '':
        messages.append({"role":"user", "content":message})
    
    response = openai.ChatCompletion.create(model="gpt-3.5-turbo",
    messages = messages, 
    temperature =0.8,
    max_tokens = 5000,
    top_p = 0.95,
    frequency_penalty = 1,
    presence_penalty = 1,
    stop = None)


    
    return response["choices"][0]["message"]["content"]
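
# A minimal usage sketch, assuming the api_base_key / api_key environment variables are set:
# calling engine() directly with an empty history returns the assistant's reply as a plain string.
#
#   reply = engine("Hello there! How are you doing?", [])
#   print(reply)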

title = "πŸ•Š Chat with Pigeon"

description = \
"""
💬 This space is powered by **Hugging Face** hosting.

🚀 This space runs **very fast** even on **CPU**.

🎠 You get totally unique and creative answers.

🌍 PigeonChat is available worldwide in over **160 languages**.

🔒 PigeonChat is powered by **open source** and is completely **private**.

👥 This demo is by **Evgeniy Hristoforu** (**OpenSkyML**).

<h2></h2>
"""


examples = [
    'Hello there! How are you doing?',
    'Can you explain briefly to me what is the Python programming language?',
    'Explain the plot of Cinderella in a sentence.',
    'How many hours does it take a man to eat a Helicopter?',
    "Write a 100-word article on 'Benefits of Open-Source in AI research'",
]

chatbot = gr.Chatbot(label="PigeonChat",avatar_images=('user.png', 'bot.png'), min_width=260, show_share_button=False, show_copy_button=True, bubble_full_width = False)


gr.ChatInterface(
    engine,
    chatbot=chatbot,
    title=title,
    description=description,
    examples=examples,
).launch(share=False, debug=False, show_error=False, show_api=False)