Skip to content

Commit

Permalink
Fix Khoj responses when code generated charts in response context
Browse files Browse the repository at this point in the history
This fix should improve Khoj responses when charts are present in the response
context. It truncates the code context before sharing it with the response chat actors.

Previously, Khoj would respond saying it could not create a chart,
but then include a generated chart in its response in default mode.

The code-context truncation had been added to the research chat actor
for decision making, but it wasn't added to the conversation response
generation chat actors.

When Khoj generated charts with code for its response, the images in
the context would exceed context window limits.

In that case, the generic truncation logic would drop all past context,
including chat history and the context gathered for the current response.

This would result in the chat response generator 'forgetting' everything for
the current response whenever code-generated images or charts were in the response context.
  • Loading branch information
debanjum committed Nov 21, 2024
1 parent 5475a26 commit b9a889a
Show file tree
Hide file tree
Showing 4 changed files with 29 additions and 7 deletions.
10 changes: 8 additions & 2 deletions src/khoj/processor/conversation/anthropic/anthropic_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,11 @@
generate_chatml_messages_with_context,
messages_to_print,
)
from khoj.utils.helpers import ConversationCommand, is_none_or_empty
from khoj.utils.helpers import (
ConversationCommand,
is_none_or_empty,
truncate_code_context,
)
from khoj.utils.rawconfig import LocationData
from khoj.utils.yaml import yaml_dump

Expand Down Expand Up @@ -197,7 +201,9 @@ def converse_anthropic(
if ConversationCommand.Online in conversation_commands or ConversationCommand.Webpage in conversation_commands:
context_message += f"{prompts.online_search_conversation.format(online_results=yaml_dump(online_results))}\n\n"
if ConversationCommand.Code in conversation_commands and not is_none_or_empty(code_results):
context_message += f"{prompts.code_executed_context.format(code_results=str(code_results))}\n\n"
context_message += (
f"{prompts.code_executed_context.format(code_results=truncate_code_context(code_results))}\n\n"
)
context_message = context_message.strip()

# Setup Prompt with Primer or Conversation History
Expand Down
10 changes: 8 additions & 2 deletions src/khoj/processor/conversation/google/gemini_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,11 @@
generate_chatml_messages_with_context,
messages_to_print,
)
from khoj.utils.helpers import ConversationCommand, is_none_or_empty
from khoj.utils.helpers import (
ConversationCommand,
is_none_or_empty,
truncate_code_context,
)
from khoj.utils.rawconfig import LocationData
from khoj.utils.yaml import yaml_dump

Expand Down Expand Up @@ -208,7 +212,9 @@ def converse_gemini(
if ConversationCommand.Online in conversation_commands or ConversationCommand.Webpage in conversation_commands:
context_message += f"{prompts.online_search_conversation.format(online_results=yaml_dump(online_results))}\n\n"
if ConversationCommand.Code in conversation_commands and not is_none_or_empty(code_results):
context_message += f"{prompts.code_executed_context.format(code_results=str(code_results))}\n\n"
context_message += (
f"{prompts.code_executed_context.format(code_results=truncate_code_context(code_results))}\n\n"
)
context_message = context_message.strip()

# Setup Prompt with Primer or Conversation History
Expand Down
5 changes: 4 additions & 1 deletion src/khoj/processor/conversation/offline/chat_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
in_debug_mode,
is_none_or_empty,
is_promptrace_enabled,
truncate_code_context,
)
from khoj.utils.rawconfig import LocationData
from khoj.utils.yaml import yaml_dump
Expand Down Expand Up @@ -211,7 +212,9 @@ def converse_offline(

context_message += f"{prompts.online_search_conversation_offline.format(online_results=yaml_dump(simplified_online_results))}\n\n"
if ConversationCommand.Code in conversation_commands and not is_none_or_empty(code_results):
context_message += f"{prompts.code_executed_context.format(code_results=str(code_results))}\n\n"
context_message += (
f"{prompts.code_executed_context.format(code_results=truncate_code_context(code_results))}\n\n"
)
context_message = context_message.strip()

# Setup Prompt with Primer or Conversation History
Expand Down
11 changes: 9 additions & 2 deletions src/khoj/processor/conversation/openai/gpt.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,11 @@
generate_chatml_messages_with_context,
messages_to_print,
)
from khoj.utils.helpers import ConversationCommand, is_none_or_empty
from khoj.utils.helpers import (
ConversationCommand,
is_none_or_empty,
truncate_code_context,
)
from khoj.utils.rawconfig import LocationData
from khoj.utils.yaml import yaml_dump

Expand Down Expand Up @@ -196,7 +200,10 @@ def converse(
if not is_none_or_empty(online_results):
context_message += f"{prompts.online_search_conversation.format(online_results=yaml_dump(online_results))}\n\n"
if not is_none_or_empty(code_results):
context_message += f"{prompts.code_executed_context.format(code_results=str(code_results))}\n\n"
context_message += (
f"{prompts.code_executed_context.format(code_results=truncate_code_context(code_results))}\n\n"
)

context_message = context_message.strip()

# Setup Prompt with Primer or Conversation History
Expand Down

0 comments on commit b9a889a

Please sign in to comment.