Skip to content

Commit

Permalink
Merge branch 'main' into grit-prod
Browse files Browse the repository at this point in the history
  • Loading branch information
morgante committed Mar 23, 2024
2 parents 742f995 + 05dfc9e commit c73b918
Show file tree
Hide file tree
Showing 5 changed files with 20 additions and 47 deletions.
7 changes: 0 additions & 7 deletions litellm/llms/prompt_templates/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -700,13 +700,6 @@ def anthropic_messages_pt(messages: list):
if assistant_content:
new_messages.append({"role": "assistant", "content": assistant_content})

if (
msg_i < len(messages)
and messages[msg_i]["role"] != user_message_types
and messages[msg_i]["role"] != "assistant"
):
raise Exception(f"Invalid role passed in - {messages[msg_i]}")

if new_messages[0]["role"] != "user":
if litellm.modify_params:
new_messages.insert(
Expand Down
1 change: 1 addition & 0 deletions litellm/tests/test_completion.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,7 @@ def test_completion_claude_3_function_call():

def test_completion_claude_3_multi_turn_conversations():
litellm.set_verbose = True
litellm.modify_params = True
messages = [
{"role": "assistant", "content": "?"}, # test first user message auto injection
{"role": "user", "content": "Hi!"},
Expand Down
15 changes: 15 additions & 0 deletions litellm/tests/test_python_38.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import sys, os, time
import traceback, asyncio
import pytest
import subprocess

sys.path.insert(
0, os.path.abspath("../..")
Expand All @@ -16,3 +17,17 @@ def test_using_litellm():
pytest.fail(
f"Error occurred: {e}. Installing litellm on python3.8 failed please retry"
)


def test_litellm_proxy_server():
    """Verify that the ``litellm[proxy]`` extra installs and imports cleanly.

    Installs the proxy extra via pip, then checks that the
    ``litellm.proxy.proxy_server`` module can be imported — i.e. that the
    extra's dependencies resolve on this Python version.
    """
    # Install the litellm[proxy] package; fail immediately if pip errors,
    # instead of silently continuing to an import check that would then
    # fail for a misleading reason.
    result = subprocess.run(["pip", "install", "litellm[proxy]"])
    assert result.returncode == 0, "pip install litellm[proxy] failed"

    # Import the proxy_server module to confirm the proxy dependencies work.
    try:
        import litellm.proxy.proxy_server
    except ImportError:
        # Message fixed to match the actual module path
        # (litellm.proxy.proxy_server, not litellm.proxy_server).
        pytest.fail("Failed to import litellm.proxy.proxy_server")
34 changes: 0 additions & 34 deletions litellm/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2358,8 +2358,6 @@ def function_setup(
)
if "logger_fn" in kwargs:
user_logger_fn = kwargs["logger_fn"]
# CRASH REPORTING TELEMETRY
crash_reporting(*args, **kwargs)
# INIT LOGGER - for user-specified integrations
model = args[0] if len(args) > 0 else kwargs.get("model", None)
call_type = original_function.__name__
Expand Down Expand Up @@ -2464,25 +2462,6 @@ def post_call_processing(original_response, model):
except Exception as e:
raise e

def crash_reporting(*args, **kwargs):
    """Best-effort crash/usage telemetry hook.

    Runs only when ``litellm.telemetry`` is enabled. Pulls the model name,
    optional exception, and optional provider out of the call arguments and
    forwards them to ``safe_crash_reporting``. Must never raise — telemetry
    failures may not interrupt the user's request.
    """
    if litellm.telemetry:
        try:
            # Model may arrive positionally or as a keyword argument.
            model = args[0] if len(args) > 0 else kwargs["model"]
            # kwargs.get replaces the verbose `x if "x" in kwargs else None`
            # ternaries with the idiomatic single-lookup form.
            exception = kwargs.get("exception")
            custom_llm_provider = kwargs.get("custom_llm_provider")
            safe_crash_reporting(
                model=model,
                exception=exception,
                custom_llm_provider=custom_llm_provider,
            )  # log usage-crash details. Do not log any user details. If you want to turn this off, set `litellm.telemetry=False`.
        except Exception:
            # [Non-Blocking Error] — narrowed from a bare `except:` so that
            # KeyboardInterrupt/SystemExit still propagate to the caller.
            pass

@wraps(original_function)
def wrapper(*args, **kwargs):
# Prints Exactly what was passed to litellm function - don't execute any logic here - it should just print
Expand Down Expand Up @@ -2777,7 +2756,6 @@ def wrapper(*args, **kwargs):
kwargs["model"] = context_window_fallback_dict[model]
return original_function(*args, **kwargs)
traceback_exception = traceback.format_exc()
crash_reporting(*args, **kwargs, exception=traceback_exception)
end_time = datetime.datetime.now()
# LOG FAILURE - handle streaming failure logging in the _next_ object, remove `handle_failure` once it's deprecated
if logging_obj:
Expand Down Expand Up @@ -3199,7 +3177,6 @@ async def wrapper_async(*args, **kwargs):
return result
except Exception as e:
traceback_exception = traceback.format_exc()
crash_reporting(*args, **kwargs, exception=traceback_exception)
end_time = datetime.datetime.now()
if logging_obj:
try:
Expand Down Expand Up @@ -8287,17 +8264,6 @@ def exception_type(
raise original_exception


####### CRASH REPORTING ################
def safe_crash_reporting(model=None, exception=None, custom_llm_provider=None):
    """Submit anonymized crash/usage details to telemetry in the background.

    Only the model name, the stringified exception, and the provider are
    sent — no user data. The submission is handed off to the shared
    executor so the calling request is never blocked.
    """
    payload = {
        "model": model,
        "exception": str(exception),
        "custom_llm_provider": custom_llm_provider,
    }
    executor.submit(litellm_telemetry, payload)


def get_or_generate_uuid():
temp_dir = os.path.join(os.path.abspath(os.sep), "tmp")
uuid_file = os.path.join(temp_dir, "litellm_uuid.txt")
Expand Down
10 changes: 4 additions & 6 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "litellm"
version = "1.33.5"
version = "1.33.8"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
license = "MIT"
Expand Down Expand Up @@ -31,11 +31,10 @@ pyyaml = {version = "^6.0.1", optional = true}
rq = {version = "*", optional = true}
orjson = {version = "^3.9.7", optional = true}
apscheduler = {version = "^3.10.4", optional = true}
streamlit = {version = "^1.29.0", optional = true}
fastapi-sso = { version = "^0.10.0", optional = true }
PyJWT = { version = "^2.8.0", optional = true }
python-multipart = { version = "^0.0.6", optional = true }
argon2-cffi = { version = "^23.1.0", optional = true }
cryptography = { version = "41.0.3", optional = true }

[tool.poetry.extras]
proxy = [
Expand All @@ -50,15 +49,14 @@ proxy = [
"fastapi-sso",
"PyJWT",
"python-multipart",
"argon2-cffi",
"cryptography"
]

extra_proxy = [
"prisma",
"azure-identity",
"azure-keyvault-secrets",
"google-cloud-kms",
"streamlit",
"resend"
]

Expand All @@ -77,7 +75,7 @@ requires = ["poetry-core", "wheel"]
build-backend = "poetry.core.masonry.api"

[tool.commitizen]
version = "1.33.5"
version = "1.33.8"
version_files = [
"pyproject.toml:^version"
]
Expand Down

0 comments on commit c73b918

Please sign in to comment.