#### What this does ####
# On success, logs events to Promptlayer
import os
import traceback

import dotenv
import requests

dotenv.load_dotenv()  # Load environment variables from a .env file
class PromptLayerLogger:
    # Sends request/response pairs to PromptLayer's REST API
    def __init__(self):
        # Instance variables
        self.key = os.getenv("PROMPTLAYER_API_KEY")

    def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose):
        # Log a single completion call to PromptLayer; on success, also attach any metadata
        try:
            new_kwargs = {}
            new_kwargs["model"] = kwargs["model"]
            new_kwargs["messages"] = kwargs["messages"]

            # add kwargs["optional_params"] to new_kwargs
            for optional_param in kwargs["optional_params"]:
                new_kwargs[optional_param] = kwargs["optional_params"][optional_param]

            print_verbose(
                f"Prompt Layer Logging - Enters logging function for model kwargs: {new_kwargs}\n, response: {response_obj}"
            )

            request_response = requests.post(
                "https://api.promptlayer.com/rest/track-request",
                json={
                    "function_name": "openai.ChatCompletion.create",
                    "kwargs": new_kwargs,
                    "tags": ["hello", "world"],
                    "request_response": dict(response_obj),
                    "request_start_time": int(start_time.timestamp()),
                    "request_end_time": int(end_time.timestamp()),
                    "api_key": self.key,
                    # Optional params for PromptLayer
                    # "prompt_id": "<PROMPT ID>",
                    # "prompt_input_variables": "<Dictionary of variables for prompt>",
                    # "prompt_version":1,
                },
            )
            print_verbose(
                f"Prompt Layer Logging: success - final response object: {request_response.text}"
            )

            response_json = request_response.json()
            if "success" not in response_json:
                raise Exception("Promptlayer did not successfully log the response!")

            # If PromptLayer returned a request_id, attach any litellm metadata to it
            if "request_id" in response_json:
                print_verbose(kwargs["litellm_params"]["metadata"])
                if kwargs["litellm_params"]["metadata"] is not None:
                    response = requests.post(
                        "https://api.promptlayer.com/rest/track-metadata",
                        json={
                            "request_id": response_json["request_id"],
                            "api_key": self.key,
                            "metadata": kwargs["litellm_params"]["metadata"],
                        },
                    )
                    print_verbose(
                        f"Prompt Layer Logging: success - metadata post response object: {response.text}"
                    )
        except Exception:
            print_verbose(f"error: Prompt Layer Error - {traceback.format_exc()}")
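

# --- Usage sketch (illustrative only, not part of the logger itself) ---
# A minimal example of how this logger might be driven. The model name,
# messages, metadata, and dummy_response below are hypothetical stand-ins;
# in practice the calling framework (e.g. litellm) supplies the real kwargs,
# response object, and timestamps.
if __name__ == "__main__":
    from datetime import datetime

    logger = PromptLayerLogger()

    start = datetime.now()
    # ... the actual chat-completion call would happen here ...
    end = datetime.now()

    dummy_response = {
        "choices": [{"message": {"role": "assistant", "content": "Hello!"}}]
    }
    logger.log_event(
        kwargs={
            "model": "gpt-3.5-turbo",
            "messages": [{"role": "user", "content": "Hello"}],
            "optional_params": {"temperature": 0.7},
            "litellm_params": {"metadata": {"user": "example-user"}},
        },
        response_obj=dummy_response,
        start_time=start,
        end_time=end,
        print_verbose=print,  # plain print stands in for a verbose logger
    )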