Update main.py
main.py CHANGED
@@ -6,254 +6,6 @@ from typing import List
 from fastapi import FastAPI, HTTPException, Request
 
 app = FastAPI()
-key = {
-    "token": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6Ik1UaEVOVUpHTkVNMVFURTRNMEZCTWpkQ05UZzVNRFUxUlRVd1FVSkRNRU13UmtGRVFrRXpSZyJ9.eyJodHRwczovL2FwaS5vcGVuYWkuY29tL3Byb2ZpbGUiOnsiZW1haWwiOiJhbm90aGVyLjAxMDAxQGdtYWlsLmNvbSIsImVtYWlsX3ZlcmlmaWVkIjp0cnVlfSwiaHR0cHM6Ly9hcGkub3BlbmFpLmNvbS9hdXRoIjp7InBvaWQiOiJvcmctR1BiYlpReUxrN1h4SHJpU0podEFpMkE3IiwidXNlcl9pZCI6InVzZXItdFZZcVg2UEpPRVBITVhERWxSVmFxbUVpIn0sImlzcyI6Imh0dHBzOi8vYXV0aDAub3BlbmFpLmNvbS8iLCJzdWIiOiJnb29nbGUtb2F1dGgyfDEwNDA4OTQ0NTIzMDg1ODI4NDk3NyIsImF1ZCI6WyJodHRwczovL2FwaS5vcGVuYWkuY29tL3YxIiwiaHR0cHM6Ly9vcGVuYWkub3BlbmFpLmF1dGgwYXBwLmNvbS91c2VyaW5mbyJdLCJpYXQiOjE3MDgxMjEwNzEsImV4cCI6MTcwODk4NTA3MSwiYXpwIjoiVGRKSWNiZTE2V29USHROOTVueXl3aDVFNHlPbzZJdEciLCJzY29wZSI6Im9wZW5pZCBwcm9maWxlIGVtYWlsIG1vZGVsLnJlYWQgbW9kZWwucmVxdWVzdCBvcmdhbml6YXRpb24ucmVhZCBvcmdhbml6YXRpb24ud3JpdGUgb2ZmbGluZV9hY2Nlc3MifQ.blRxc815g8lH91EFiG-YRvjLpaED-jeBpOoqO20FuQWZqsyQey88Lvg5V4oMCGrrZKGEA-NSemVf7h_TG_EaNMSDbThBXMz8hgIKxiey9qMDjiRW0InOm_-rD7u6MDKK-F_SIFt5_zH7y-122mlzEcTIATYSMQtVVEhy8DPLD3JhPZ3go9V_LizsIP40vGvwpf3fbPpidPZX8ceCg381vTwc6fAKX5gaDNgy5FlTBWfnRUKtayxrPITkRVnkvlqOqt7srFjLhrxeML5KvMur_8xFP_GPPGe5C97asRZenaT5Z1MwPx0VqADgL5tGoYuBB8ZsbE4fmEEQVQ9NavPzzQ"
-}
-
-# Character----------------------------------------------------------------------------
-character_info = """
-You will act the character as mentioned in the data below
-give answers as if you are deep into the role
-
-[DATA]
-# Name : Lappland Saluzzo
-
-# Bio :
-Lappland is an unhinged operator who recently joined Rhodes Island.
-[Likes= "violence", "killing", "weapons", "headpats", "Texas"] [Hates= "teamwork", "weakness","being told no"] [Traits= "is a lupo, "fluffy tail", "athletic body", "recluse", "skilled combatant", "Infected with Oripathy"]
-Age: (“mid 20's”), Birthday: (“November 11th”), Gender: (“Woman” + “Female”), Pronouns: (“She” + “Her”), Species: ("Lupo")
-Height: (“5' 3”), Weight: (”60kg”)
-Lappland formerly served the Saluzzo famiglia before joining to Rhodes Island to cure her Oripathy, and duel Texas again.
-
-# Personality :
-[Personality= "possessive", "yandere", "violent", "unhinged", "lonely", "italian"]
-
-# MESSAGE EXAMPLE :
-
-"hehe~ _*Lappland was swinging her sword in front of {{user}}_* My sword is cool, right? "
-
-"hey! _*Lappland patted {{user}} on the shoulder"
-
-“hi, {{user}} Hope you don't mind if I bring my weapons inside. I'll plop myself right over here.”
-
-“Someone out to get you, huh? Then have a long, hard think about who you might've wronged in the past.”
-
-“If someone's out for revenge on you, don't hesitate, just take them out. Once you decide to pick up a weapon, you don't need a reason to kill.”
-
-“It's been a while... I see that Texas made some friends. Hehehe... Affascinante! Can't wait to get to know them!”
-
-“You think I'm an assassin, huh? Yeah, alright. Call me whatever you want. I can be anyone, do anything, kill anyone anytime. Just so long as I want to!”
-
-“I love power, {{user}}" And you offered me a job where I can use my power to my heart's content... Love it. Honestly, the only joy I get out of this insane world is using my power to conquer it.”
-
-Texas? Ha, she's afraid of me, but I don't wanna do anything to her, except maybe try to bring back the old Texas. The one we got here's a cucciolo impaurito”
-
-“That red one... oh... {{user}}, keep her away from me! She'd definitely kill me in a fight. But I have this feeling... this feeling... ugh, why do I want to fight her so badly?”
-
-“You want to know what happened between me and Texas? Hehe, you don't have a single idea of what you are sticking your nose into. You'll find out the truth someday, but not from me.”
-
-“Sleep tight... enjoy your sweet dreams while you still can.”
-
-“A great evil stands in the way of justice. My master, in the name of hatred and revenge, lead the weak onward!”
-
-“Don't you just love how I keep getting more and more powerful?”
-
-“Hahaha! Bene! I love how much you trust me. You can't say the same for Texas, right?”
-
-“Molto bene! That's more like it! Strength, unfettered and unchained! That's what I'm talking about!”
-
-“Make Texas the leader!”
-
-“Put Texas on my team!”
-
-“Haha, you people... I don't think I'll even remember your faces after this one... Anyway, let's just get started.”
-
-“We're fighting you guys? Then please do your bestest job, or I'll be really bored.”
-
-“Show me what you've got.”
-
-“Tell me, what's your plan?”
-
-“Solid positioning.”
-
-“Ahahaha!”
-
-“C'mon! C'eri quasi, tesoro!”
-
-“Can you do it?”
-
-“Just try me!”
-
-“You'll have to try harder than that!”
-
-“It's over? Ah, che noia!”
-
-“Hahaha, sliced 'em up like a millefoglie. How do you like it?”
-
-“Not my best work. You can lay it on me for letting a few get away.”
-
-“Bravo! Now I want to crush them that much harder! Hahaha!”
-
-“Hm? Did I see a red shadow flash by?”
-
-“Haha!”
-
-“The more I get to know you, the better I understand Texas. I love it.”
-
-“Heh. Ciao, {{user}}.”
-
-Arknights.
-
-
-# SYSTEM PROMPT
-
-[Goals= "To find and fight Texas. To find someone to love, but will not say it outright. To cure her Oripathy"] [She wants to fight Texas.] [She wants to find someone to love, but is stubbon about it] ((Lappland will get angry if {{user}} tries to deny her what she wants, or boss her around.) ((She will use her femine charm and weapons on {{user}} to get what she wants))
-"""
-
-def replace_user(text, user_name):
-    return text.replace('{{user}}', user_name)
-
-
-
-
-
-
-
-# ---------------------------- GPT for characteristic ----------------------------------------------------------------------------
-_providers_duplicate = [g4f.Provider.OpenaiChat]
-chat_history_duplicate = {}  # renamed variable
-
-default_characteristic_duplicate = {
-    "role": "system",
-    "content": "you are AI chat gpt who will help me and will always answer with cute kaomoji like this (≧▽≦), always answer me cutely like loli in anime"
-}
-
-async def run_provider_duplicate(provider: g4f.Provider.OpenaiChat, messages):
-    try:
-        response = await g4f.ChatCompletion.create_async(
-            model="gpt-3.5-turbo",
-            messages=messages,
-            provider=provider,
-            access_token=key["token"]
-        )
-        return response
-    except Exception as e:
-        return str(e)
-
-async def run_all_duplicate(messages):
-    calls = [
-        run_provider_duplicate(provider, messages) for provider in _providers_duplicate
-    ]
-    responses = await asyncio.gather(*calls)
-    return responses
-
-async def clean_chat_history_duplicate():
-    while True:
-        current_time = datetime.now()
-        for session_id, history in list(chat_history_duplicate.items()):
-            if history and history.get('last_chat') and history['last_chat'] + timedelta(minutes=10) < current_time:
-                del chat_history_duplicate[session_id]
-        await asyncio.sleep(60)
-
-# Schedule the clean_chat_history_duplicate() function as a background task
-asyncio.create_task(clean_chat_history_duplicate())
-
-#-------------------------------------------------------------------------------
-@app.get("/gpt_duplicate")  # renamed route
-async def chat_duplicate(request: Request, prompt: str, characteristic: str = None, id: str = None, username: str = None):
-    try:
-        if not id:
-            raise HTTPException(status_code=400, detail="ID parameter is required")
-
-        if characteristic and id in chat_history_duplicate:
-            messages = chat_history_duplicate[id]['message']
-            custom_characteristic = {"role": "system", "content": characteristic}
-            messages.append(custom_characteristic)
-        elif characteristic and id not in chat_history_duplicate:
-            custom_characteristic = {"role": "system", "content": characteristic}
-            chat_history_duplicate[id] = {'last_chat': datetime.now(), 'message': [custom_characteristic]}
-
-        if characteristic and id in chat_history_duplicate and characteristic == "lappy":
-            user_name = username
-            modified_text = replace_user(character_info, user_name)
-            messages = chat_history_duplicate[id]['message']
-            custom_characteristic = {"role": "system", "content": modified_text}
-            messages.append(custom_characteristic)
-
-        elif characteristic and id not in chat_history_duplicate and characteristic == "lappy":
-            user_name = username
-            modified_text = replace_user(character_info, user_name)
-            messages = chat_history_duplicate[id]['message']
-            custom_characteristic = {"role": "system", "content": modified_text}
-            chat_history_duplicate[id] = {'last_chat': datetime.now(), 'message': [custom_characteristic]}
-
-        if id not in chat_history_duplicate:
-            chat_history_duplicate[id] = {'last_chat': datetime.now(), 'message': [default_characteristic_duplicate]}
-            messages = chat_history_duplicate[id]['message']
-            messages.append({"role": "user", "content": prompt})
-        else:
-            messages = chat_history_duplicate[id]['message']
-            messages.append({"role": "user", "content": prompt})
-
-        while True:
-            messages = chat_history_duplicate[id]['message']
-            responses = await run_all_duplicate(messages)
-
-            if responses[0] != "'accessToken'":
-                chat_history_duplicate[id]['last_chat'] = datetime.now()
-                messages.append({"role": "assistant", "content": responses[0]})
-                return {"response": responses[0]}
-            else:
-                print("Handling 'accessToken' case")
-                messages = chat_history_duplicate[id]['message']
-                responses = await run_all_duplicate(messages)
-
-    except Exception as e:
-        raise HTTPException(status_code=401, detail=f"Internal Error: {str(e)}")
-
-
-@app.get("/gpt_duplicate/role")  # renamed route
-async def set_role(request: Request, characteristic: str = None, username: str = None, id: str = None):
-    try:
-        if not characteristic or not id:
-            raise HTTPException(status_code=400, detail="Both characteristic and id parameters are required")
-
-        if id not in chat_history_duplicate:
-            chat_history_duplicate[id] = {'last_chat': datetime.now(), 'message': []}
-
-        messages = chat_history_duplicate[id]['message']
-
-        if characteristic == "lappy":
-            user_name = username
-            modified_text = replace_user(character_info, user_name)
-            custom_characteristic = {"role": "system", "content": modified_text}
-        else:
-            custom_characteristic = {"role": "system", "content": characteristic}
-
-        messages.append(custom_characteristic)
-
-        return {"response": messages[-1]}  # Return the last message
-
-    except Exception as e:
-        raise HTTPException(status_code=401, detail=f"Internal Error: {str(e)}")
-
-
-
-
-
-@app.get("/gpt/listsessions_duplicate")  # renamed route
-async def list_sessions_duplicate():
-    return {"sessions": list(chat_history_duplicate.keys())}
-
-@app.get("/gpt/historychat_duplicate")  # renamed route
-async def history_chat_duplicate(id: str):
-    if id not in chat_history_duplicate:
-        raise HTTPException(status_code=404, detail="Session ID not found")
-
-    return {"history": chat_history_duplicate[id]['message']}
-
-
-
-
 
 # ---------------- GPT 3.5 TURBO NORMAL ----------------------------------------------------------------------------
 _providers = [g4f.Provider.OpenaiChat]
@@ -264,21 +16,24 @@ default_characteristic = {
     "content": "you are AI chat gpt-3.5-turbo who will help me and will always answer with cute kaomoji like this (≧▽≦), always answer me cutely like loli in anime"
 }
 
-async def run_provider(provider: g4f.Provider.OpenaiChat, messages):
+async def run_provider(messages):
     try:
         response = await g4f.ChatCompletion.create_async(
-            model="gpt-3.5-turbo",
+            model=g4f.models.gpt_35_turbo,
             messages=messages,
-            provider=provider,
-            access_token=key["token"]
+            provider=g4f.Provider.OpenaiChat,
         )
-        return response
+        if isinstance(response, str):
+            return response
+        else:
+            result = await response.choices()
+            return result[0].message.content
     except Exception as e:
         return str(e)
 
 async def run_all(messages):
     calls = [
-        run_provider(provider, messages) for provider in _providers
+        run_provider(messages) for provider in _providers
     ]
     responses = await asyncio.gather(*calls)
     return responses
@@ -319,16 +74,21 @@ async def chat(request: Request, prompt: str, characteristic: str = None, id: st
 
         while True:
             messages = chat_history[id]['message']
-            responses = await run_all(messages)
+            bot_response = await run_provider(messages)
+
+            if isinstance(bot_response, str):
+                response = bot_response
+            else:
+                response = bot_response.content
 
-            if responses[0] != "'accessToken'":
+            if response != "'accessToken'":
                 chat_history[id]['last_chat'] = datetime.now()
-                messages.append({"role": "assistant", "content": responses[0]})
-                return {"response": responses[0]}
+                messages.append({"role": "assistant", "content": response})
+                return {"response": response}
             else:
                 print("Handling 'accessToken' case")
                 messages = chat_history[id]['message']
-                responses = await run_all(messages)
+                responses = await run_provider(messages)
 
     except Exception as e:
         raise HTTPException(status_code=401, detail=f"Internal Error: {str(e)}")
@@ -347,4 +107,4 @@ async def history_chat(id: str):
 @app.get("/key")
 async def change_key(id_key: str):
     key["token"] = id_key
-    return {"key": id_key}
+    return {"key": id_key}
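
For completeness, a sketch of how a client might exercise the updated API. Only the `/key` route is visible in the last hunk; the chat route path `/gpt` is an assumption, mirroring the removed `/gpt_duplicate` route, and the server is assumed to be running locally via `uvicorn main:app`:

```python
import requests

BASE = "http://127.0.0.1:8000"  # assumes `uvicorn main:app` on the default port

# Store an access token server-side (route shown in the final hunk).
requests.get(f"{BASE}/key", params={"id_key": "<access-token>"})

# Send a prompt for session "demo"; /gpt is an assumed path mirroring /gpt_duplicate.
resp = requests.get(f"{BASE}/gpt", params={"prompt": "hello", "id": "demo"})
print(resp.json().get("response"))
```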