Commit aa6d689

Lint

1 parent dd1a0dc commit aa6d689

File tree

3 files changed: +4, -4 lines

awswrangler/_threading.py

Lines changed: 1 addition & 1 deletion
@@ -37,4 +37,4 @@ def map(self, func: Callable[..., List[str]], boto3_session: boto3.Session, *ite
            args = (itertools.repeat(boto3_primitives), *iterables)
            return list(self._exec.map(func, *args))
        # Single-threaded
-       return list(map(func, *(itertools.repeat(boto3_session), *iterables)))  # type: ignore
+       return list(map(func, *(itertools.repeat(boto3_session), *iterables)))  # type: ignore
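
The removed and added lines render identically above, so the lint fix here is most likely an invisible whitespace or formatting adjustment on the single-threaded fallback. For orientation, the surrounding pattern dispatches the same callable either through a thread pool or through the built-in map. Below is a minimal, self-contained sketch of that dispatch pattern, assuming a simplified stand-in class (SketchExecutor) and a toy function; neither is part of awswrangler.

import itertools
from concurrent.futures import ThreadPoolExecutor
from typing import Any, Callable, List, Optional


class SketchExecutor:
    """Illustrative stand-in for the pool-or-serial executor in awswrangler/_threading.py."""

    def __init__(self, max_workers: Optional[int] = None) -> None:
        # A missing pool means "single-threaded mode".
        self._exec = ThreadPoolExecutor(max_workers=max_workers) if max_workers else None

    def map(self, func: Callable[..., Any], session: Any, *iterables: Any) -> List[Any]:
        # Repeat the session so every call receives it as its first argument.
        args = (itertools.repeat(session), *iterables)
        if self._exec is not None:
            return list(self._exec.map(func, *args))
        # Single-threaded fallback: the built-in map with the same argument shape.
        return list(map(func, *args))


def _describe(session: Any, path: str) -> str:
    return f"{session}:{path}"


print(SketchExecutor().map(_describe, "session", ["s3://a", "s3://b"]))
# ['session:s3://a', 'session:s3://b']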

awswrangler/distributed/datasources/parquet_datasource.py

Lines changed: 1 addition & 2 deletions
@@ -5,10 +5,10 @@

 import numpy as np
 import pyarrow as pa
-import pyarrow.parquet as pq

 # fs required to implicitly trigger S3 subsystem initialization
 import pyarrow.fs  # noqa: F401 pylint: disable=unused-import
+import pyarrow.parquet as pq
 from ray import cloudpickle
 from ray.data.context import DatasetContext
 from ray.data.datasource.datasource import ReadTask

@@ -46,7 +46,6 @@ def prepare_read(
     _block_udf: Optional[Callable[..., Any]] = None,
 ) -> List[ReadTask]:
     """Create and return read tasks for a Parquet file-based datasource."""
-
     paths, filesystem = _resolve_paths_and_filesystem(paths, filesystem)

     parquet_dataset = pq.ParquetDataset(
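
Both hunks in this file are ordering and spacing cleanups rather than behavior changes: the pyarrow.parquet import moves below pyarrow.fs so the third-party imports stay alphabetical, and the blank line directly after the prepare_read docstring is dropped. After the change the import block reads as follows (only lines already shown in the diff, in their new order):

import numpy as np
import pyarrow as pa

# fs required to implicitly trigger S3 subsystem initialization
import pyarrow.fs  # noqa: F401 pylint: disable=unused-import
import pyarrow.parquet as pq
from ray import cloudpickle
from ray.data.context import DatasetContext
from ray.data.datasource.datasource import ReadTask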

awswrangler/s3/_read_parquet.py

Lines changed: 2 additions & 1 deletion
@@ -97,7 +97,7 @@ def _read_schemas_from_files(
     paths = _utils.list_sampling(lst=paths, sampling=sampling)

     executor = _get_executor(use_threads=use_threads)
-    return ray_get(
+    schemas = ray_get(
         executor.map(
             _read_parquet_metadata_file,
             boto3_session,

@@ -108,6 +108,7 @@ def _read_schemas_from_files(
             itertools.repeat(coerce_int96_timestamp_unit),
         )
     )
+    return [schema for schema in schemas if schema is not None]


 def _validate_schemas(schemas: List[pa.schema], validate_schema: bool) -> pa.schema:
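
This appears to be the only behavioral change in the commit: the result of ray_get is now bound to schemas, and entries that come back as None are dropped before returning, presumably so that downstream consumers such as _validate_schemas only see real pyarrow schemas. A minimal sketch of that collect-then-filter pattern, using a hypothetical read_metadata helper in place of _read_parquet_metadata_file and a plain list comprehension in place of the executor machinery:

from typing import List, Optional

import pyarrow as pa


def read_metadata(path: str) -> Optional[pa.Schema]:
    # Hypothetical stand-in: pretend files ending in "_empty" have no readable schema.
    if path.endswith("_empty"):
        return None
    return pa.schema([("id", pa.int64())])


def read_schemas(paths: List[str]) -> List[pa.Schema]:
    # Collect every result first (the commit binds this to `schemas`) ...
    schemas = [read_metadata(p) for p in paths]
    # ... then drop the None entries so validation only sees real schemas.
    return [schema for schema in schemas if schema is not None]


print(read_schemas(["s3://bucket/a.parquet", "s3://bucket/part_empty"]))
# Prints a one-element list containing the id: int64 schema.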
