Commit 457f5d7

SDK regeneration (#509)
Co-authored-by: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
1 parent a5e9f0c

8 files changed, 80 insertions(+), 32 deletions(-)

poetry.lock

Lines changed: 10 additions & 10 deletions
Some generated files are not rendered by default.

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "cohere"
-version = "5.5.2"
+version = "5.5.3"
 description = ""
 readme = "README.md"
 authors = []
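The only change here is the package version bump from 5.5.2 to 5.5.3. A quick way to confirm which release is installed after upgrading, using only the standard library (the pip command line is the usual upgrade path and is an assumption, not part of this diff):

# pip install --upgrade cohere   # typical upgrade path (assumption, not shown in this diff)
from importlib.metadata import version

print(version("cohere"))  # expected to print "5.5.3" once this release is installed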

src/cohere/base_client.py

Lines changed: 14 additions & 14 deletions
@@ -65,7 +65,7 @@
 
 class BaseCohere:
     """
-    Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propogate to these functions.
+    Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propagate to these functions.
 
     Parameters
     ----------
@@ -164,7 +164,7 @@ def chat_stream(
     ) -> typing.Iterator[StreamedChatResponse]:
         """
         Generates a text response to a user message.
-        To learn how to use Chat with Streaming and RAG follow [this guide](https://docs.cohere.com/docs/cochat-beta#various-ways-of-using-the-chat-endpoint).
+        To learn how to use the Chat API with Streaming and RAG follow our [Text Generation guides](https://docs.cohere.com/docs/chat-api).
 
         Parameters
         ----------
@@ -460,7 +460,7 @@ def chat_stream(
         for chunk in response:
             yield chunk
         """
-        _request: typing.Dict[str, typing.Any] = {"message": message}
+        _request: typing.Dict[str, typing.Any] = {"message": message, "stream": True}
         if model is not OMIT:
             _request["model"] = model
         if preamble is not OMIT:
@@ -583,7 +583,7 @@ def chat(
     ) -> NonStreamedChatResponse:
         """
         Generates a text response to a user message.
-        To learn how to use Chat with Streaming and RAG follow [this guide](https://docs.cohere.com/docs/cochat-beta#various-ways-of-using-the-chat-endpoint).
+        To learn how to use the Chat API with Streaming and RAG follow our [Text Generation guides](https://docs.cohere.com/docs/chat-api).
 
         Parameters
         ----------
@@ -803,7 +803,7 @@ def chat(
             temperature=0.3,
         )
         """
-        _request: typing.Dict[str, typing.Any] = {"message": message}
+        _request: typing.Dict[str, typing.Any] = {"message": message, "stream": False}
         if model is not OMIT:
             _request["model"] = model
         if preamble is not OMIT:
@@ -1038,7 +1038,7 @@ def generate_stream(
         for chunk in response:
             yield chunk
         """
-        _request: typing.Dict[str, typing.Any] = {"prompt": prompt}
+        _request: typing.Dict[str, typing.Any] = {"prompt": prompt, "stream": True}
         if model is not OMIT:
             _request["model"] = model
         if num_generations is not OMIT:
@@ -1253,7 +1253,7 @@ def generate(
             prompt="Please explain to me how LLMs work",
         )
         """
-        _request: typing.Dict[str, typing.Any] = {"prompt": prompt}
+        _request: typing.Dict[str, typing.Any] = {"prompt": prompt, "stream": False}
         if model is not OMIT:
             _request["model"] = model
         if num_generations is not OMIT:
@@ -2084,7 +2084,7 @@ def check_api_key(self, *, request_options: typing.Optional[RequestOptions] = No
 
 class AsyncBaseCohere:
     """
-    Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propogate to these functions.
+    Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propagate to these functions.
 
     Parameters
     ----------
@@ -2183,7 +2183,7 @@ async def chat_stream(
     ) -> typing.AsyncIterator[StreamedChatResponse]:
         """
         Generates a text response to a user message.
-        To learn how to use Chat with Streaming and RAG follow [this guide](https://docs.cohere.com/docs/cochat-beta#various-ways-of-using-the-chat-endpoint).
+        To learn how to use the Chat API with Streaming and RAG follow our [Text Generation guides](https://docs.cohere.com/docs/chat-api).
 
         Parameters
         ----------
@@ -2479,7 +2479,7 @@ async def chat_stream(
         async for chunk in response:
             yield chunk
         """
-        _request: typing.Dict[str, typing.Any] = {"message": message}
+        _request: typing.Dict[str, typing.Any] = {"message": message, "stream": True}
         if model is not OMIT:
             _request["model"] = model
         if preamble is not OMIT:
@@ -2602,7 +2602,7 @@ async def chat(
     ) -> NonStreamedChatResponse:
         """
         Generates a text response to a user message.
-        To learn how to use Chat with Streaming and RAG follow [this guide](https://docs.cohere.com/docs/cochat-beta#various-ways-of-using-the-chat-endpoint).
+        To learn how to use the Chat API with Streaming and RAG follow our [Text Generation guides](https://docs.cohere.com/docs/chat-api).
 
         Parameters
         ----------
@@ -2822,7 +2822,7 @@ async def chat(
             temperature=0.3,
         )
         """
-        _request: typing.Dict[str, typing.Any] = {"message": message}
+        _request: typing.Dict[str, typing.Any] = {"message": message, "stream": False}
         if model is not OMIT:
             _request["model"] = model
         if preamble is not OMIT:
@@ -3057,7 +3057,7 @@ async def generate_stream(
         async for chunk in response:
             yield chunk
         """
-        _request: typing.Dict[str, typing.Any] = {"prompt": prompt}
+        _request: typing.Dict[str, typing.Any] = {"prompt": prompt, "stream": True}
         if model is not OMIT:
             _request["model"] = model
         if num_generations is not OMIT:
@@ -3272,7 +3272,7 @@ async def generate(
             prompt="Please explain to me how LLMs work",
         )
         """
-        _request: typing.Dict[str, typing.Any] = {"prompt": prompt}
+        _request: typing.Dict[str, typing.Any] = {"prompt": prompt, "stream": False}
         if model is not OMIT:
             _request["model"] = model
         if num_generations is not OMIT:
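The substantive change in this file, besides the docstring fixes, is that chat_stream/generate_stream now hard-code "stream": True in the request body while chat/generate hard-code "stream": False, so the streaming flag always matches the method being called instead of relying on a server default. A minimal usage sketch of the two call styles, assuming the public cohere.Client wrapper and the event_type/text attributes on streamed chunks (neither is shown in this diff):

import cohere

client = cohere.Client("YOUR_API_KEY")  # placeholder key

# chat() now sends "stream": False and returns a single NonStreamedChatResponse
response = client.chat(message="Please explain to me how LLMs work", temperature=0.3)
print(response.text)

# chat_stream() now sends "stream": True and yields StreamedChatResponse chunks
for chunk in client.chat_stream(message="Please explain to me how LLMs work"):
    if chunk.event_type == "text-generation":
        print(chunk.text, end="")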

src/cohere/connectors/client.py

Lines changed: 6 additions & 0 deletions
@@ -362,6 +362,9 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] =
                     request_options.get("additional_query_parameters") if request_options is not None else None
                 )
             ),
+            json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))
+            if request_options is not None
+            else None,
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
@@ -968,6 +971,9 @@ async def delete(
                     request_options.get("additional_query_parameters") if request_options is not None else None
                 )
             ),
+            json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))
+            if request_options is not None
+            else None,
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
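These hunks, and the matching ones in the datasets and finetuning clients below, add a json= argument to the delete requests, so any additional_body_parameters supplied through RequestOptions are now forwarded as the DELETE request body instead of being dropped. A minimal sketch of how a caller would exercise this, assuming the connectors.delete signature shown in the hunk header; the connector id and body payload are placeholders:

import cohere

client = cohere.Client("YOUR_API_KEY")  # placeholder key

client.connectors.delete(
    "my-connector-id",  # placeholder id
    request_options={
        # Previously ignored for DELETE calls; now sent as the JSON request body.
        "additional_body_parameters": {"reason": "cleanup"},
    },
)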

src/cohere/core/client_wrapper.py

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "cohere",
-            "X-Fern-SDK-Version": "5.5.2",
+            "X-Fern-SDK-Version": "5.5.3",
         }
         if self._client_name is not None:
             headers["X-Client-Name"] = self._client_name

src/cohere/core/unchecked_base_model.py

Lines changed: 36 additions & 6 deletions
@@ -8,7 +8,7 @@
 import typing_extensions
 
 from .datetime_utils import serialize_datetime
-from .pydantic_utilities import pydantic_v1
+from .pydantic_utilities import IS_PYDANTIC_V2, pydantic_v1
 
 
 class UnionMetadata:
@@ -60,6 +60,7 @@ def construct(
                 else:
                     type_ = typing.cast(typing.Type, field.outer_type_)  # type: ignore
                 fields_values[name] = construct_type(object_=values[key], type_=type_)
+                _fields_set.add(name)
             elif not field.required:
                 default = field.get_default()
                 fields_values[name] = default
@@ -71,10 +72,19 @@ def construct(
                 _fields_set.add(key)
 
         # Add extras back in
+        _extra = {}
         for key, value in values.items():
-            if key not in cls.__fields__:
-                _fields_set.add(key)
-                fields_values[key] = value
+            if key not in _fields_set:
+                _extra[key] = value
+                # In v2 we'll need to exclude extra fields from fields_values
+                if not IS_PYDANTIC_V2:
+                    _fields_set.add(key)
+                    fields_values[key] = value
+
+        if IS_PYDANTIC_V2:
+            object.__setattr__(m, "__pydantic_private__", None)
+            object.__setattr__(m, "__pydantic_extra__", _extra)
+            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
 
         object.__setattr__(m, "__dict__", fields_values)
         object.__setattr__(m, "__fields_set__", _fields_set)
@@ -144,8 +154,12 @@ def construct_type(*, type_: typing.Type[typing.Any], object_: typing.Any) -> ty
         if not isinstance(object_, typing.Mapping):
             return object_
 
-        _, items_type = pydantic_v1.typing.get_args(type_)
-        return {key: construct_type(object_=item, type_=items_type) for key, item in object_.items()}
+        key_type, items_type = pydantic_v1.typing.get_args(type_)
+        d = {
+            construct_type(object_=key, type_=key_type): construct_type(object_=item, type_=items_type)
+            for key, item in object_.items()
+        }
+        return d
 
     if base_type == list:
         if not isinstance(object_, list):
@@ -190,4 +204,20 @@ def construct_type(*, type_: typing.Type[typing.Any], object_: typing.Any) -> ty
         except Exception:
             return object_
 
+    if base_type == int:
+        try:
+            return int(object_)
+        except Exception:
+            return object_
+
+    if base_type == bool:
+        try:
+            if isinstance(object_, str):
+                stringified_object = object_.lower()
+                return stringified_object == "true" or stringified_object == "1"
+
+            return bool(object_)
+        except Exception:
+            return object_
+
     return object_
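Beyond the pydantic-v2 handling of extra fields (which now land in __pydantic_extra__ rather than the field set), construct_type gains coercion for primitive targets: dict keys are constructed against their annotated type, int targets go through int(), and bool targets accept the strings "true"/"1" case-insensitively, falling back to the raw object on failure. A small sketch of the resulting behaviour, assuming construct_type is imported from this module as generated:

import typing

from cohere.core.unchecked_base_model import construct_type

print(construct_type(type_=bool, object_="True"))  # True  ("true"/"1", case-insensitive)
print(construct_type(type_=bool, object_="no"))    # False (any other string)
print(construct_type(type_=int, object_="42"))     # 42    (int() coercion; raw object on failure)

# Dict keys are now constructed against the annotated key type as well as the values.
print(construct_type(type_=typing.Dict[int, bool], object_={"1": "true"}))  # {1: True}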

src/cohere/datasets/client.py

Lines changed: 6 additions & 0 deletions
@@ -422,6 +422,9 @@ def delete(
                     request_options.get("additional_query_parameters") if request_options is not None else None
                 )
             ),
+            json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))
+            if request_options is not None
+            else None,
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
@@ -844,6 +847,9 @@ async def delete(
                     request_options.get("additional_query_parameters") if request_options is not None else None
                 )
             ),
+            json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))
+            if request_options is not None
+            else None,
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {

src/cohere/finetuning/client.py

Lines changed: 6 additions & 0 deletions
@@ -368,6 +368,9 @@ def delete_finetuned_model(
                     request_options.get("additional_query_parameters") if request_options is not None else None
                 )
             ),
+            json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))
+            if request_options is not None
+            else None,
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
@@ -1127,6 +1130,9 @@ async def delete_finetuned_model(
                     request_options.get("additional_query_parameters") if request_options is not None else None
                 )
             ),
+            json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))
+            if request_options is not None
+            else None,
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
