Skip to content

Commit

Permalink
fix: ark token usage is none
Browse files Browse the repository at this point in the history
  • Loading branch information
sinomoe committed Sep 13, 2024
1 parent d109881 commit 8f70139
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 11 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -208,11 +208,9 @@ def stream_chat(
presence_penalty=presence_penalty,
top_p=top_p,
temperature=temperature,
stream_options={"include_usage": True},
)
for chunk in chunks:
if not chunk.choices:
continue
yield chunk
yield from chunks

def embeddings(self, texts: list[str]) -> CreateEmbeddingResponse:
    """Create embeddings for *texts* via the Ark endpoint bound to this client.

    The endpoint id is passed as the model name; the SDK response is
    returned unchanged.
    """
    # Thin delegation to the underlying Ark SDK client — no local processing.
    response = self.ark.embeddings.create(model=self.endpoint_id, input=texts)
    return response
Original file line number Diff line number Diff line change
Expand Up @@ -241,16 +241,14 @@ def _generate_v3(

def _handle_stream_chat_response(chunks: Generator[ChatCompletionChunk]) -> Generator:
for chunk in chunks:
if not chunk.choices:
continue
choice = chunk.choices[0]

yield LLMResultChunk(
model=model,
prompt_messages=prompt_messages,
delta=LLMResultChunkDelta(
index=choice.index,
message=AssistantPromptMessage(content=choice.delta.content, tool_calls=[]),
index=0,
message=AssistantPromptMessage(
content=chunk.choices[0].delta.content if chunk.choices else "", tool_calls=[]
),
usage=self._calc_response_usage(
model=model,
credentials=credentials,
Expand All @@ -259,7 +257,7 @@ def _handle_stream_chat_response(chunks: Generator[ChatCompletionChunk]) -> Gene
)
if chunk.usage
else None,
finish_reason=choice.finish_reason,
finish_reason=chunk.choices[0].finish_reason if chunk.choices else None,
),
)

Expand Down

0 comments on commit 8f70139

Please sign in to comment.