Update app.py
app.py CHANGED
@@ -27,7 +27,7 @@ llm_client = InferenceClient(
     model=repo_id,
     token=os.getenv("HF_TOKEN"),
 )
-client = Client("
+client = Client("Be-Bo/llama-3-chatbot_70b")
 os.environ["HF_TOKEN"] = os.getenv("HF_TOKEN")
 username = os.getenv("username")
 password = os.getenv("password")
@@ -167,11 +167,9 @@ async def save_chat_history(history: dict):
     hist = "summarize this context and tell me user interest: " + hist
     print(hist)
     result = client.predict(
-
-
-
-        api_name="/predict"
-    )
+        message=hist,
+        api_name="/chat"
+    )
     sf.Lead.update(user_id,{'Description': result})
     return {"summary": result, "message": "Chat history saved"}
 @app.post("/webhook")
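For context, the change points gradio_client at the Be-Bo/llama-3-chatbot_70b Space and asks its /chat endpoint for the summary. A minimal standalone sketch of that call, assuming the Space is reachable and using a placeholder transcript in place of the real chat history, would be:

import os
from gradio_client import Client

# Connect to the hosted chatbot Space; hf_token is optional for public Spaces.
client = Client("Be-Bo/llama-3-chatbot_70b", hf_token=os.getenv("HF_TOKEN"))

# Build the same summarization prompt save_chat_history uses (placeholder transcript).
hist = "summarize this context and tell me user interest: " + "user asked about pricing tiers"

# Call the Space's /chat endpoint, as in the updated app.py.
result = client.predict(
    message=hist,
    api_name="/chat",
)
print(result)

The returned string is what the endpoint then writes into the Salesforce Lead's Description field via sf.Lead.update.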