
Commit 573d90d

🌿 Fern Regeneration -- January 15, 2025 (#224)
Co-authored-by: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
Co-authored-by: twitchard <[email protected]>
1 parent 8c99227 commit 573d90d

Showing 8 changed files with 117 additions and 45 deletions.

pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -1,5 +1,6 @@
 [project]
 name = "hume"
+
 [tool.poetry]
 name = "hume"
 version = "0.7.6"

src/hume/core/__init__.py

Lines changed: 2 additions & 1 deletion
@@ -3,7 +3,7 @@
 from .api_error import ApiError
 from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper
 from .datetime_utils import serialize_datetime
-from .file import File, convert_file_dict_to_httpx_tuples
+from .file import File, convert_file_dict_to_httpx_tuples, with_content_type
 from .http_client import AsyncHttpClient, HttpClient
 from .jsonable_encoder import jsonable_encoder
 from .pagination import AsyncPager, SyncPager
@@ -46,4 +46,5 @@
     "universal_field_validator",
     "universal_root_validator",
     "update_forward_refs",
+    "with_content_type",
 ]

src/hume/core/client_wrapper.py

Lines changed: 6 additions & 6 deletions
@@ -42,9 +42,9 @@ def __init__(
         super().__init__(api_key=api_key, base_url=base_url, timeout=timeout)
         self.httpx_client = HttpClient(
             httpx_client=httpx_client,
-            base_headers=self.get_headers(),
-            base_timeout=self.get_timeout(),
-            base_url=self.get_base_url(),
+            base_headers=lambda: self.get_headers(),
+            base_timeout=lambda: self.get_timeout(),
+            base_url=lambda: self.get_base_url(),
         )


@@ -60,7 +60,7 @@ def __init__(
         super().__init__(api_key=api_key, base_url=base_url, timeout=timeout)
         self.httpx_client = AsyncHttpClient(
             httpx_client=httpx_client,
-            base_headers=self.get_headers(),
-            base_timeout=self.get_timeout(),
-            base_url=self.get_base_url(),
+            base_headers=lambda: self.get_headers(),
+            base_timeout=lambda: self.get_timeout(),
+            base_url=lambda: self.get_base_url(),
         )
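
Note: switching these arguments from eager values to lambdas defers evaluation, so the HTTP client resolves the wrapper's current headers, timeout, and base URL on every request instead of using a snapshot captured at construction. A standalone sketch of the difference (illustration only, not SDK code; the header name is a placeholder):

    class Wrapper:
        def __init__(self, api_key: str) -> None:
            self.api_key = api_key

        def get_headers(self) -> dict:
            return {"X-Api-Key": self.api_key}

    w = Wrapper("key-1")
    eager_headers = w.get_headers()         # snapshot captured once
    lazy_headers = lambda: w.get_headers()  # callable, evaluated per use

    w.api_key = "key-2"
    print(eager_headers)   # {'X-Api-Key': 'key-1'}  -- stale snapshot
    print(lazy_headers())  # {'X-Api-Key': 'key-2'}  -- reflects current state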

src/hume/core/file.py

Lines changed: 30 additions & 11 deletions
@@ -1,30 +1,30 @@
 # This file was auto-generated by Fern from our API Definition.

-import typing
+from typing import IO, Dict, List, Mapping, Optional, Tuple, Union, cast

 # File typing inspired by the flexibility of types within the httpx library
 # https://github.com/encode/httpx/blob/master/httpx/_types.py
-FileContent = typing.Union[typing.IO[bytes], bytes, str]
-File = typing.Union[
+FileContent = Union[IO[bytes], bytes, str]
+File = Union[
     # file (or bytes)
     FileContent,
     # (filename, file (or bytes))
-    typing.Tuple[typing.Optional[str], FileContent],
+    Tuple[Optional[str], FileContent],
     # (filename, file (or bytes), content_type)
-    typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str]],
+    Tuple[Optional[str], FileContent, Optional[str]],
     # (filename, file (or bytes), content_type, headers)
-    typing.Tuple[
-        typing.Optional[str],
+    Tuple[
+        Optional[str],
         FileContent,
-        typing.Optional[str],
-        typing.Mapping[str, str],
+        Optional[str],
+        Mapping[str, str],
     ],
 ]


 def convert_file_dict_to_httpx_tuples(
-    d: typing.Dict[str, typing.Union[File, typing.List[File]]],
-) -> typing.List[typing.Tuple[str, File]]:
+    d: Dict[str, Union[File, List[File]]],
+) -> List[Tuple[str, File]]:
     """
     The format we use is a list of tuples, where the first element is the
     name of the file and the second is the file object. Typically HTTPX wants
@@ -41,3 +41,22 @@ def convert_file_dict_to_httpx_tuples(
         else:
             httpx_tuples.append((key, file_like))
     return httpx_tuples
+
+
+def with_content_type(*, file: File, content_type: str) -> File:
+    """ """
+    if isinstance(file, tuple):
+        if len(file) == 2:
+            filename, content = cast(Tuple[Optional[str], FileContent], file)  # type: ignore
+            return (filename, content, content_type)
+        elif len(file) == 3:
+            filename, content, _ = cast(Tuple[Optional[str], FileContent, Optional[str]], file)  # type: ignore
+            return (filename, content, content_type)
+        elif len(file) == 4:
+            filename, content, _, headers = cast(  # type: ignore
+                Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file
+            )
+            return (filename, content, content_type, headers)
+        else:
+            raise ValueError(f"Unexpected tuple length: {len(file)}")
+    return (None, file, content_type)
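
Note: with_content_type normalizes any accepted File shape into a tuple carrying an explicit content type, and it is re-exported from hume.core (see the __init__.py change above). A small usage sketch (the filename and MIME type are illustrative):

    from hume.core import with_content_type

    # Bare bytes (or a file object) become a (filename, content, content_type) tuple.
    print(with_content_type(file=b"hello", content_type="text/plain"))
    # (None, b'hello', 'text/plain')

    # An existing (filename, content) pair keeps its filename and gains the content type.
    print(with_content_type(file=("notes.txt", b"hello"), content_type="text/plain"))
    # ('notes.txt', b'hello', 'text/plain')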

src/hume/core/http_client.py

Lines changed: 28 additions & 18 deletions
@@ -152,17 +152,20 @@ def __init__(
         self,
         *,
         httpx_client: httpx.Client,
-        base_timeout: typing.Optional[float],
-        base_headers: typing.Dict[str, str],
-        base_url: typing.Optional[str] = None,
+        base_timeout: typing.Callable[[], typing.Optional[float]],
+        base_headers: typing.Callable[[], typing.Dict[str, str]],
+        base_url: typing.Optional[typing.Callable[[], str]] = None,
     ):
         self.base_url = base_url
         self.base_timeout = base_timeout
         self.base_headers = base_headers
         self.httpx_client = httpx_client

     def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
-        base_url = self.base_url if maybe_base_url is None else maybe_base_url
+        base_url = maybe_base_url
+        if self.base_url is not None and base_url is None:
+            base_url = self.base_url()
+
         if base_url is None:
             raise ValueError("A base_url is required to make this request, please provide one and try again.")
         return base_url
@@ -187,7 +190,7 @@ def request(
         timeout = (
             request_options.get("timeout_in_seconds")
             if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else self.base_timeout
+            else self.base_timeout()
         )

         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
@@ -198,7 +201,7 @@ def request(
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
-                        **self.base_headers,
+                        **self.base_headers(),
                         **(headers if headers is not None else {}),
                         **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                     }
@@ -224,7 +227,9 @@ def request(
             json=json_body,
             data=data_body,
             content=content,
-            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files))
+            if (files is not None and files is not omit)
+            else None,
             timeout=timeout,
         )

@@ -269,7 +274,7 @@ def stream(
         timeout = (
             request_options.get("timeout_in_seconds")
             if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else self.base_timeout
+            else self.base_timeout()
         )

         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
@@ -280,7 +285,7 @@ def stream(
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
-                        **self.base_headers,
+                        **self.base_headers(),
                         **(headers if headers is not None else {}),
                         **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                     }
@@ -306,7 +311,9 @@ def stream(
             json=json_body,
             data=data_body,
             content=content,
-            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files))
+            if (files is not None and files is not omit)
+            else None,
             timeout=timeout,
         ) as stream:
             yield stream
@@ -317,17 +324,20 @@ def __init__(
         self,
         *,
         httpx_client: httpx.AsyncClient,
-        base_timeout: typing.Optional[float],
-        base_headers: typing.Dict[str, str],
-        base_url: typing.Optional[str] = None,
+        base_timeout: typing.Callable[[], typing.Optional[float]],
+        base_headers: typing.Callable[[], typing.Dict[str, str]],
+        base_url: typing.Optional[typing.Callable[[], str]] = None,
     ):
         self.base_url = base_url
         self.base_timeout = base_timeout
         self.base_headers = base_headers
         self.httpx_client = httpx_client

     def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
-        base_url = self.base_url if maybe_base_url is None else maybe_base_url
+        base_url = maybe_base_url
+        if self.base_url is not None and base_url is None:
+            base_url = self.base_url()
+
         if base_url is None:
             raise ValueError("A base_url is required to make this request, please provide one and try again.")
         return base_url
@@ -352,7 +362,7 @@ async def request(
         timeout = (
             request_options.get("timeout_in_seconds")
             if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else self.base_timeout
+            else self.base_timeout()
         )

         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
@@ -364,7 +374,7 @@ async def request(
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
-                        **self.base_headers,
+                        **self.base_headers(),
                         **(headers if headers is not None else {}),
                         **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                     }
@@ -434,7 +444,7 @@ async def stream(
         timeout = (
             request_options.get("timeout_in_seconds")
             if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else self.base_timeout
+            else self.base_timeout()
         )

         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
@@ -445,7 +455,7 @@ async def stream(
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
-                        **self.base_headers,
+                        **self.base_headers(),
                         **(headers if headers is not None else {}),
                         **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                     }
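
Note: besides invoking the new base_headers/base_timeout/base_url callables per request, the files argument is now converted only when it is a real value, i.e. neither None nor the omit sentinel. A standalone sketch of that sentinel pattern (illustration only, not SDK code):

    OMIT = object()  # unique sentinel: "argument was not provided at all"

    def prepare_files(files, omit=OMIT):
        # Mirrors the new guard: identity comparison, not equality.
        if files is not None and files is not omit:
            return list(files.items())  # stand-in for convert_file_dict_to_httpx_tuples(...)
        return None

    print(prepare_files({"file": b"\x00\x01"}))  # [('file', b'\x00\x01')]
    print(prepare_files(None))                   # None
    print(prepare_files(OMIT))                   # None -- omitted entirely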

src/hume/core/pydantic_utilities.py

Lines changed: 30 additions & 7 deletions
@@ -97,15 +97,15 @@ class Config:

     @classmethod
     def model_construct(
-        cls: type[Model], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
-    ) -> Model:
+        cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
+    ) -> "Model":
         dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
         return cls.construct(_fields_set, **dealiased_object)

     @classmethod
     def construct(
-        cls: type[Model], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
-    ) -> Model:
+        cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
+    ) -> "Model":
         dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
         if IS_PYDANTIC_V2:
             return super().model_construct(_fields_set, **dealiased_object)  # type: ignore # Pydantic v2
@@ -152,7 +152,7 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
             )

         else:
-            _fields_set = self.__fields_set__
+            _fields_set = self.__fields_set__.copy()

             fields = _get_model_fields(self.__class__)
             for name, field in fields.items():
@@ -162,9 +162,12 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
                     # If the default values are non-null act like they've been set
                     # This effectively allows exclude_unset to work like exclude_none where
                     # the latter passes through intentionally set none values.
-                    if default != None:
+                    if default is not None or ("exclude_unset" in kwargs and not kwargs["exclude_unset"]):
                         _fields_set.add(name)

+                        if default is not None:
+                            self.__fields_set__.add(name)
+
             kwargs_with_defaults_exclude_unset_include_fields: typing.Any = {
                 "by_alias": True,
                 "exclude_unset": True,
@@ -177,13 +180,33 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
         return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write")


+def _union_list_of_pydantic_dicts(
+    source: typing.List[typing.Any], destination: typing.List[typing.Any]
+) -> typing.List[typing.Any]:
+    converted_list: typing.List[typing.Any] = []
+    for i, item in enumerate(source):
+        destination_value = destination[i]  # type: ignore
+        if isinstance(item, dict):
+            converted_list.append(deep_union_pydantic_dicts(item, destination_value))
+        elif isinstance(item, list):
+            converted_list.append(_union_list_of_pydantic_dicts(item, destination_value))
+        else:
+            converted_list.append(item)
+    return converted_list
+
+
 def deep_union_pydantic_dicts(
     source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any]
 ) -> typing.Dict[str, typing.Any]:
     for key, value in source.items():
+        node = destination.setdefault(key, {})
         if isinstance(value, dict):
-            node = destination.setdefault(key, {})
             deep_union_pydantic_dicts(value, node)
+        # Note: we do not do this same processing for sets given we do not have sets of models
+        # and given the sets are unordered, the processing of the set and matching objects would
+        # be non-trivial.
+        elif isinstance(value, list):
+            destination[key] = _union_list_of_pydantic_dicts(value, node)
         else:
             destination[key] = value

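Note: deep_union_pydantic_dicts previously recursed only into nested dicts; with _union_list_of_pydantic_dicts it now also merges element-wise through lists (sets are deliberately left alone, per the new comment). An illustration of the merge semantics, calling the internal helper directly purely for demonstration, and assuming both lists have the same length, as they do when merging two dumps of the same model:

    from hume.core.pydantic_utilities import deep_union_pydantic_dicts

    source = {"items": [{"a": 1}, {"b": 2}]}
    destination = {"items": [{"a": 0, "x": 9}, {"b": 0, "y": 8}], "kept": True}

    deep_union_pydantic_dicts(source, destination)
    print(destination)
    # {'items': [{'a': 1, 'x': 9}, {'b': 2, 'y': 8}], 'kept': True}
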
src/hume/core/serialization.py

Lines changed: 18 additions & 0 deletions
@@ -71,6 +71,24 @@ def convert_and_respect_annotation_metadata(
     if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping):
         return _convert_mapping(object_, clean_type, direction)

+    if (
+        typing_extensions.get_origin(clean_type) == typing.Dict
+        or typing_extensions.get_origin(clean_type) == dict
+        or clean_type == typing.Dict
+    ) and isinstance(object_, typing.Dict):
+        key_type = typing_extensions.get_args(clean_type)[0]
+        value_type = typing_extensions.get_args(clean_type)[1]
+
+        return {
+            key: convert_and_respect_annotation_metadata(
+                object_=value,
+                annotation=annotation,
+                inner_type=value_type,
+                direction=direction,
+            )
+            for key, value in object_.items()
+        }
+
     # If you're iterating on a string, do not bother to coerce it to a sequence.
     if not isinstance(object_, str):
         if (
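
Note: the new branch makes convert_and_respect_annotation_metadata recurse into the values of Dict-annotated objects, so alias metadata on the value type is respected instead of the mapping being passed through unchanged. A standalone sketch of the idea (helper names here are stand-ins, not SDK code):

    import typing
    import typing_extensions

    def convert_dict_values(obj: dict, annotation: typing.Any, convert) -> dict:
        # For a Dict[K, V] annotation, convert each value using V and rebuild the mapping.
        value_type = typing_extensions.get_args(annotation)[1]
        return {key: convert(value, value_type) for key, value in obj.items()}

    print(convert_dict_values({"a": "1", "b": "2"}, typing.Dict[str, int], lambda v, t: t(v)))
    # {'a': 1, 'b': 2}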

src/hume/expression_measurement/batch/client.py

Lines changed: 2 additions & 2 deletions
@@ -350,7 +350,7 @@ def start_inference_job_from_local_file(
         self,
         *,
         file: typing.List[core.File],
-        json: typing.Optional[InferenceBaseRequest] = None,
+        json: typing.Optional[InferenceBaseRequest] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> str:
         """
@@ -766,7 +766,7 @@ async def start_inference_job_from_local_file(
         self,
         *,
         file: typing.List[core.File],
-        json: typing.Optional[InferenceBaseRequest] = None,
+        json: typing.Optional[InferenceBaseRequest] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> str:
         """

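Note: changing the default for json from None to the OMIT sentinel means an unset argument is simply dropped from the multipart request rather than being sent as an explicit null. A hedged call sketch (the import path and attribute chain follow the generated module layout shown above but are not verified here; the API key and file name are placeholders):

    from hume.client import HumeClient  # assumed top-level client export

    client = HumeClient(api_key="YOUR_HUME_API_KEY")
    with open("sample.mp4", "rb") as media:
        # `json` left unset: with the OMIT default it is omitted from the request body.
        job_id = client.expression_measurement.batch.start_inference_job_from_local_file(
            file=[media],
        )
    print(job_id)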