fix: ark token usage is none (langgenius#8351)
sinomoe authored and JunXu01 committed Nov 9, 2024
1 parent dc11fe2 commit 247869c
Showing 2 changed files with 7 additions and 11 deletions.
@@ -208,11 +208,9 @@ def stream_chat(
             presence_penalty=presence_penalty,
             top_p=top_p,
             temperature=temperature,
+            stream_options={"include_usage": True},
         )
-        for chunk in chunks:
-            if not chunk.choices:
-                continue
-            yield chunk
+        yield from chunks

     def embeddings(self, texts: list[str]) -> CreateEmbeddingResponse:
         return self.ark.embeddings.create(model=self.endpoint_id, input=texts)
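
Why the filtering loop went away: with Ark's OpenAI-compatible streaming protocol, passing stream_options={"include_usage": True} makes the server append one final chunk whose choices list is empty and whose usage field carries the token counts. The old loop dropped exactly that chunk, which is why token usage came back as None. A minimal consumer sketch of that behavior, assuming an OpenAI-compatible client; the base URL, key, and endpoint id below are placeholders, not the project's real wiring:

    from openai import OpenAI

    # Placeholders: point this at Ark's OpenAI-compatible endpoint.
    client = OpenAI(base_url="<ark-openai-compatible-base-url>", api_key="<api-key>")

    chunks = client.chat.completions.create(
        model="<endpoint-id>",                    # Ark addresses models by endpoint id
        messages=[{"role": "user", "content": "hi"}],
        stream=True,
        stream_options={"include_usage": True},   # ask for the trailing usage-only chunk
    )
    for chunk in chunks:
        if chunk.choices:                         # ordinary delta chunks carry content
            print(chunk.choices[0].delta.content or "", end="")
        elif chunk.usage:                         # final chunk: choices == [], usage is set
            print("\ntokens:", chunk.usage.prompt_tokens, "+", chunk.usage.completion_tokens)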
@@ -239,16 +239,14 @@ def _generate_v3(
         def _handle_stream_chat_response(chunks: Generator[ChatCompletionChunk]) -> Generator:
             for chunk in chunks:
-                if not chunk.choices:
-                    continue
-                choice = chunk.choices[0]
-
                 yield LLMResultChunk(
                     model=model,
                     prompt_messages=prompt_messages,
                     delta=LLMResultChunkDelta(
-                        index=choice.index,
-                        message=AssistantPromptMessage(content=choice.delta.content, tool_calls=[]),
+                        index=0,
+                        message=AssistantPromptMessage(
+                            content=chunk.choices[0].delta.content if chunk.choices else "", tool_calls=[]
+                        ),
                         usage=self._calc_response_usage(
                             model=model,
                             credentials=credentials,
@@ -257,7 +255,7 @@ def _handle_stream_chat_response(chunks: Generator[ChatCompletionChunk]) -> Generator:
                         )
                         if chunk.usage
                         else None,
-                        finish_reason=choice.finish_reason,
+                        finish_reason=chunk.choices[0].finish_reason if chunk.choices else None,
                     ),
                 )

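The handler now guards every access to chunk.choices instead of discarding choice-less chunks, so the trailing usage-only chunk still reaches _calc_response_usage whenever chunk.usage is set; hardcoding index=0 replaces choice.index, which no longer exists on a chunk with an empty choices list. A self-contained illustration of those guard expressions, using stand-in objects rather than the real SDK types:

    from types import SimpleNamespace

    # Stand-in for the final usage-only chunk the patched handler must survive.
    final_chunk = SimpleNamespace(
        choices=[],                                        # no delta on the last chunk
        usage=SimpleNamespace(prompt_tokens=12, completion_tokens=34),
    )

    content = final_chunk.choices[0].delta.content if final_chunk.choices else ""
    finish_reason = final_chunk.choices[0].finish_reason if final_chunk.choices else None

    assert content == "" and finish_reason is None         # guards fall back cleanly
    assert final_chunk.usage.completion_tokens == 34       # usage survives instead of being skipped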