Skip to content

Commit d9954d8

Browse files
Fix 9219: OpenAIAgent using AzureOpenAI not working with stream_chat and astream_chat (#9890)
1 parent 0c19550 commit d9954d8

File tree

1 file changed

+12
-3
lines changed

1 file changed

+12
-3
lines changed

llama_index/llms/openai.py

Lines changed: 12 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -13,7 +13,7 @@
1313

1414
import httpx
1515
import tiktoken
16-
from openai import AsyncOpenAI
16+
from openai import AsyncOpenAI, AzureOpenAI
1717
from openai import OpenAI as SyncOpenAI
1818
from openai.types.chat.chat_completion_chunk import (
1919
ChatCompletionChunk,
@@ -199,6 +199,9 @@ def _get_model_name(self) -> str:
199199
model_name = model_name.split(":")[1]
200200
return model_name
201201

202+
def _is_azure_client(self) -> bool:
203+
return isinstance(self._get_client(), AzureOpenAI)
204+
202205
@classmethod
203206
def class_name(cls) -> str:
204207
return "openai_llm"
@@ -366,7 +369,10 @@ def gen() -> ChatResponseGen:
366369
if len(response.choices) > 0:
367370
delta = response.choices[0].delta
368371
else:
369-
delta = ChoiceDelta()
372+
if self._is_azure_client():
373+
continue
374+
else:
375+
delta = ChoiceDelta()
370376

371377
# check if this chunk is the start of a function call
372378
if delta.tool_calls:
@@ -565,7 +571,10 @@ async def gen() -> ChatResponseAsyncGen:
565571
continue
566572
delta = response.choices[0].delta
567573
else:
568-
delta = ChoiceDelta()
574+
if self._is_azure_client():
575+
continue
576+
else:
577+
delta = ChoiceDelta()
569578
first_chat_chunk = False
570579

571580
# check if this chunk is the start of a function call

0 commit comments

Comments (0)