Skip to content

Commit

Permalink
merge
Browse the repository at this point in the history
  • Loading branch information
khlevin committed Apr 1, 2024
2 parents 17047e3 + 3794434 commit cd17e06
Show file tree
Hide file tree
Showing 12 changed files with 719 additions and 588 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ dependencies = [
"traitlets>=5.14.1",
"ipdb>=0.13.13",
"ipython>=8.18.1",
"litellm>=1.26.6",
"litellm>=1.34.12",
"PyYAML>=6.0.1",
"ipyflow>=0.0.130",
"numpy>=1.26.3"
Expand Down
34 changes: 5 additions & 29 deletions src/chatdbg/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
from chatdbg.chatdbg_pdb import ChatDBG
from chatdbg.util.config import chatdbg_config
import sys
import getopt

_usage = """\
usage: python -m ipdb [-m] [-c command] ... pyfile [arg] ...
Expand All @@ -20,43 +19,20 @@
Option -m is available only in Python 3.7 and later.
ChatDBG-specific options may appear anywhere before pyfile:
--debug dump the LLM messages to a chatdbg.log
--log file where to write the log of the debugging session
--model model the LLM model to use.
--stream stream responses from the LLM
"""


def main():
    """Entry point for `python -m chatdbg`.

    Swaps ChatDBG in as the debugger class used by ipdb, strips the
    ChatDBG-specific flags out of the command line, and then delegates
    to ipdb's own main() for the standard pdb/ipdb argument handling.
    """
    # Make ipdb instantiate our ChatDBG subclass instead of its default debugger.
    ipdb.__main__._get_debugger_cls = lambda: ChatDBG

    # Consume ChatDBG-specific flags (e.g. --debug, --model, --stream);
    # whatever remains is passed through to ipdb untouched.
    args = chatdbg_config.parse_user_flags(sys.argv[1:])

    if "-h" in args or "--help" in args:
        # Show both the pdb-style usage text and the ChatDBG flag help.
        print(_usage)
        print(chatdbg_config.user_flags_help())
        sys.exit()

    # Rebuild argv so ipdb.__main__.main() sees only the flags it understands.
    sys.argv = [sys.argv[0]] + args

    ipdb.__main__.main()

Expand Down
21 changes: 18 additions & 3 deletions src/chatdbg/assistant/assistant.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,10 @@ def __init__(
debug=False,
stream=False,
):

# Hide their debugging info -- it messes with our error handling
litellm.suppress_debug_info = True

if debug:
log_file = open(f"chatdbg.log", "w")
self._logger = lambda model_call_dict: print(
Expand Down Expand Up @@ -91,13 +95,24 @@ def _check_model(self):
)
sys.exit(1)

if not litellm.supports_function_calling(self._model):
try:
if not litellm.supports_function_calling(self._model):
self._broadcast(
"on_fail",
textwrap.dedent(
f"""\
The {self._model} model does not support function calls.
You must use a model that does, eg. gpt-4."""
),
)
sys.exit(1)
except:
self._broadcast(
"on_fail",
textwrap.dedent(
f"""\
The {self._model} model does not support function calls.
You must use a model that does, eg. gpt-4."""
{self._model} does not appear to be a supported model.
See https://docs.litellm.ai/docs/providers"""
),
)
sys.exit(1)
Expand Down
Loading

0 comments on commit cd17e06

Please sign in to comment.