32 changes: 23 additions & 9 deletions litellm/integrations/prompt_layer.py
@@ -2,12 +2,11 @@
 # On success, logs events to Promptlayer
 import dotenv, os
 import requests
-import requests
+from pydantic import BaseModel
 
 dotenv.load_dotenv()  # Loading env variables using dotenv
 import traceback
-
 
 class PromptLayerLogger:
     # Class variables or attributes
     def __init__(self):
@@ -25,16 +24,30 @@ def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose):
             for optional_param in kwargs["optional_params"]:
                 new_kwargs[optional_param] = kwargs["optional_params"][optional_param]
 
+            # Extract PromptLayer tags from metadata, if such exists
+            tags = []
+            metadata = {}
+            if "metadata" in kwargs["litellm_params"]:
+                if "pl_tags" in kwargs["litellm_params"]["metadata"]:
+                    tags = kwargs["litellm_params"]["metadata"]["pl_tags"]
+
+                # Remove "pl_tags" from metadata
+                metadata = {k:v for k, v in kwargs["litellm_params"]["metadata"].items() if k != "pl_tags"}
+
             print_verbose(
                 f"Prompt Layer Logging - Enters logging function for model kwargs: {new_kwargs}\n, response: {response_obj}"
             )
 
+            # python-openai >= 1.0.0 returns Pydantic objects instead of jsons
+            if isinstance(response_obj, BaseModel):
+                response_obj = response_obj.model_dump()
+
             request_response = requests.post(
                 "https://api.promptlayer.com/rest/track-request",
                 json={
                     "function_name": "openai.ChatCompletion.create",
                     "kwargs": new_kwargs,
-                    "tags": ["hello", "world"],
+                    "tags": tags,
                     "request_response": dict(response_obj),
                     "request_start_time": int(start_time.timestamp()),
                     "request_end_time": int(end_time.timestamp()),
@@ -45,22 +58,23 @@ def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose):
                     # "prompt_version":1,
                 },
             )
+
+            response_json = request_response.json()
+            if not request_response.json().get("success", False):
+                raise Exception("Promptlayer did not successfully log the response!")
+
             print_verbose(
                 f"Prompt Layer Logging: success - final response object: {request_response.text}"
             )
-            response_json = request_response.json()
-            if "success" not in request_response.json():
-                raise Exception("Promptlayer did not successfully log the response!")
 
             if "request_id" in response_json:
-                print(kwargs["litellm_params"]["metadata"])
-                if kwargs["litellm_params"]["metadata"] is not None:
+                if metadata:
                     response = requests.post(
                         "https://api.promptlayer.com/rest/track-metadata",
                         json={
                             "request_id": response_json["request_id"],
                             "api_key": self.key,
-                            "metadata": kwargs["litellm_params"]["metadata"],
+                            "metadata": metadata,
                         },
                     )
                     print_verbose(
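A note on the relocated success check in the hunk above: the old membership test `"success" not in request_response.json()` only raised when the key was absent altogether, so a payload that explicitly reported `"success": false` would slip through. The rewritten `.get("success", False)` raises on both a missing key and a falsy value. A minimal sketch of the difference, using a hypothetical failure payload (the exact shape of PromptLayer's error response is an assumption here):

# Hypothetical failure payload; the real PromptLayer error shape may differ.
response_json = {"success": False}

old_check_fires = "success" not in response_json            # False: key exists, so the old check never raised
new_check_fires = not response_json.get("success", False)   # True: a falsy value now counts as failure

assert not old_check_fires and new_check_fires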
37 changes: 32 additions & 5 deletions litellm/tests/test_promptlayer_integration.py
@@ -7,11 +7,12 @@
 from litellm import completion
 import litellm
 
+import pytest
+
 litellm.success_callback = ["promptlayer"]
 litellm.set_verbose = True
 import time
-
 
 # def test_promptlayer_logging():
 #     try:
 #         # Redirect stdout
@@ -58,15 +59,41 @@ def test_promptlayer_logging_with_metadata():
         sys.stdout = old_stdout
         output = new_stdout.getvalue().strip()
         print(output)
-        if "LiteLLM: Prompt Layer Logging: success" not in output:
-            raise Exception("Required log message not found!")
 
+        assert "Prompt Layer Logging: success" in output
 
     except Exception as e:
-        print(e)
+        pytest.fail(f"Error occurred: {e}")
 
 
-test_promptlayer_logging_with_metadata()
+def test_promptlayer_logging_with_metadata_tags():
+    try:
+        # Redirect stdout
+        old_stdout = sys.stdout
+        sys.stdout = new_stdout = io.StringIO()
+
+        response = completion(
+            model="gpt-3.5-turbo",
+            messages=[{"role": "user", "content": "Hi 👋 - i'm ai21"}],
+            temperature=0.2,
+            max_tokens=20,
+            metadata={"model": "ai21", "pl_tags": ["env:dev"]},
+            mock_response="this is a mock response"
+        )
+
+        # Restore stdout
+        time.sleep(1)
+        sys.stdout = old_stdout
+        output = new_stdout.getvalue().strip()
+        print(output)
+
+        assert "Prompt Layer Logging: success" in output
+
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+test_promptlayer_logging_with_metadata()
+test_promptlayer_logging_with_metadata_tags()
 
 # def test_chat_openai():
 #     try:
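For reference, a minimal usage sketch of the behavior these tests exercise: tags passed under the "pl_tags" metadata key become PromptLayer request tags, and the remaining metadata keys are forwarded to the /rest/track-metadata endpoint. This mirrors the new test above; mock_response avoids a real provider call, though a valid PROMPTLAYER_API_KEY is still assumed in the environment:

import litellm
from litellm import completion

# Log successful completions to PromptLayer (reads PROMPTLAYER_API_KEY from the env)
litellm.success_callback = ["promptlayer"]

response = completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hi 👋"}],
    # "pl_tags" becomes the PromptLayer tags list and is stripped from the
    # metadata that gets logged via track-metadata
    metadata={"model": "ai21", "pl_tags": ["env:dev"]},
    mock_response="this is a mock response",  # skip the real LLM call
)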