Upload 3 files
- anything.py +60 -0
- llm_observable_anything.py +68 -0
- requirements.txt +4 -0
anything.py
ADDED
@@ -0,0 +1,60 @@
import requests
import json


def send_message(message):
    # Define the API endpoint URL
    url = 'https://severian-anything.hf.space/api/v1/workspace/Scoreboard/chat'

    # Define the request headers
    headers = {
        'accept': 'application/json',
        'Authorization': 'Bearer TYQYM46-RPCMQ98-GCGJMNB-Q23K6HC',
        'Content-Type': 'application/json'
    }

    # Define the request body for the new message
    data = {
        "message": message,
        "mode": "query"
    }

    # Convert the data dictionary to JSON format
    data_json = json.dumps(data)

    try:
        # Send the POST request
        response = requests.post(url, headers=headers, data=data_json)

        # Parse the response JSON data
        response_data = response.json()

        # Get the bot's response
        bot_response = response_data.get("textResponse")

        # Print the bot's response
        if bot_response:
            print(f"Bot: {bot_response}")

        # Prompt for a new message
        new_message = input("You: ")

        # Return the new message
        return new_message

    except requests.RequestException as e:
        print(f"Request failed: {e}")
        return None

    except Exception as e:
        print(f"An error occurred: {e}")
        return None


if __name__ == "__main__":
    # Initial message
    message = input("You: ")

    # Keep chatting until the user types "exit" or a request fails (None)
    while message and message.lower() != "exit":
        # Send the user's message and get a new message
        message = send_message(message)

    print("Chat ended.")
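
For reference, the same workspace chat endpoint can be called once without the interactive input loop. The sketch below is illustrative only and assumes the same endpoint and API key as anything.py; the helper name query_workspace is not part of the uploaded files.

# Hypothetical one-shot helper for the AnythingLLM workspace chat endpoint.
# Sketch only: url and api_key are passed in rather than hard-coded.
import requests

def query_workspace(message, url, api_key):
    headers = {
        'accept': 'application/json',
        'Authorization': f'Bearer {api_key}',
        'Content-Type': 'application/json'
    }
    # requests serializes the payload when it is passed via the json= argument
    response = requests.post(url, headers=headers, json={"message": message, "mode": "query"})
    response.raise_for_status()
    # The chat endpoint returns its answer in the "textResponse" field
    return response.json().get("textResponse")
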
llm_observable_anything.py
ADDED
@@ -0,0 +1,68 @@
import os
import requests
import json
from ddtrace import patch_all
from ddtrace.opentelemetry import TracerProvider
from opentelemetry import trace as ot_trace

# Set up environment variables for Datadog LLM Observability
# (done before the tracer is initialized so the config is picked up)
os.environ['DD_LLMOBS_ENABLED'] = '1'
os.environ['DD_LLMOBS_ML_APP'] = 'anything-api'
os.environ['DD_LLMOBS_AGENTLESS_ENABLED'] = '1'

# Ensure DD_API_KEY and DD_SITE are set in your environment variables

# Initialize Datadog tracing and expose it through the OpenTelemetry API
patch_all()
provider = TracerProvider()
ot_trace.set_tracer_provider(provider)
tracer = ot_trace.get_tracer(__name__)


def send_message(message):
    url = 'https://severian-anything.hf.space/api/v1/workspace/Scoreboard/chat'
    headers = {
        'accept': 'application/json',
        'Authorization': 'Bearer TYQYM46-RPCMQ98-GCGJMNB-Q23K6HC',
        'Content-Type': 'application/json'
    }
    data = {
        "message": message,
        "mode": "query"
    }
    data_json = json.dumps(data)

    # Trace the API call with an explicit OpenTelemetry span
    # (opened here rather than via a decorator)
    with tracer.start_as_current_span("llm_api_call") as span:
        span.set_attribute("llm.request.model", "anything-api")
        span.set_attribute("llm.request.input", message)

        try:
            response = requests.post(url, headers=headers, data=data_json)
            response_data = response.json()
            bot_response = response_data.get("textResponse")

            span.set_attribute("llm.response.output", bot_response or "")

            if bot_response:
                print(f"Bot: {bot_response}")

            new_message = input("You: ")
            return new_message

        except requests.RequestException as e:
            span.record_exception(e)
            print(f"Request failed: {e}")
            return None

        except Exception as e:
            span.record_exception(e)
            print(f"An error occurred: {e}")
            return None


if __name__ == "__main__":
    message = input("You: ")

    # Stop on "exit" or when send_message returns None after a failure
    while message and message.lower() != "exit":
        message = send_message(message)

    print("Chat ended.")
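
The script above expects DD_API_KEY and DD_SITE to already be present in the environment for agentless submission. A minimal sketch of providing them in-process before the ddtrace setup runs is shown below; the placeholder values are illustrative, not real credentials.

# Sketch only: in practice export these in the deployment environment
# rather than hard-coding them in source.
import os

os.environ.setdefault('DD_API_KEY', '<your-datadog-api-key>')  # placeholder, not a real key
os.environ.setdefault('DD_SITE', 'datadoghq.com')  # or your Datadog site, e.g. datadoghq.eu
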
requirements.txt
ADDED
@@ -0,0 +1,4 @@
ddtrace
requests

#pip install -r requirements.txt