Skip to content
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@
update_tool_calls,
is_json_schema_supported,
)
from openai import AsyncOpenAI, AzureOpenAI, AsyncAzureOpenAI
from openai import AsyncOpenAI
from openai import OpenAI as SyncOpenAI
from openai.types.chat.chat_completion_chunk import (
ChatCompletionChunk,
Expand Down Expand Up @@ -532,10 +532,12 @@ def gen() -> ChatResponseGen:
if len(response.choices) > 0:
delta = response.choices[0].delta
else:
if isinstance(client, AzureOpenAI):
# Choices could be empty if we receive usage details or a content filtering result.
# In the case of usage details, we simply generate an empty delta; the usage delta
# arrives in the last chunk of the response. In all other cases we avoid yielding.
if response.usage is None:
continue
else:
delta = ChoiceDelta()
delta = ChoiceDelta()

if delta is None:
continue
Expand Down Expand Up @@ -801,10 +803,12 @@ async def gen() -> ChatResponseAsyncGen:
continue
delta = response.choices[0].delta
else:
if isinstance(aclient, AsyncAzureOpenAI):
# Choices could be empty if we receive usage details or a content filtering result.
# In the case of usage details, we simply generate an empty delta; the usage delta
# arrives in the last chunk of the response. In all other cases we avoid yielding.
if response.usage is None:
continue
else:
delta = ChoiceDelta()
delta = ChoiceDelta()
first_chat_chunk = False

if delta is None:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ dev = [

[project]
name = "llama-index-llms-openai"
version = "0.5.2"
version = "0.5.3"
description = "llama-index llms openai integration"
authors = [{name = "llama-index"}]
requires-python = ">=3.9,<4.0"
Expand Down
Loading