
chore(deps-dev): bump the development-dependencies group with 3 updates #3072


Merged · 3 commits · Jan 13, 2025
3 changes: 1 addition & 2 deletions awswrangler/_utils.py
@@ -121,8 +121,7 @@ def inner(*args: Any, **kwargs: Any) -> Any:
             package_name = INSTALL_MAPPING.get(name)
             install_name = package_name if package_name is not None else name
             raise ModuleNotFoundError(
-                f"Missing optional dependency '{name}'. "
-                f"Use pip install awswrangler[{install_name}] to install it."
+                f"Missing optional dependency '{name}'. Use pip install awswrangler[{install_name}] to install it."
             )
         return func(*args, **kwargs)
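
Reviewer note: this hunk is the new formatter joining implicitly concatenated string literals onto one line; the runtime message is unchanged. A minimal check, with made-up values for the two variables:

    # Both spellings evaluate to the same string; the values are illustrative.
    name = "mysql"
    install_name = "mysql"
    old = f"Missing optional dependency '{name}'. " f"Use pip install awswrangler[{install_name}] to install it."
    new = f"Missing optional dependency '{name}'. Use pip install awswrangler[{install_name}] to install it."
    assert old == new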
2 changes: 1 addition & 1 deletion awswrangler/athena/_read.py
@@ -610,7 +610,7 @@ def _unload(
     if partitioned_by:
         unload_parameters += f" , partitioned_by=ARRAY{partitioned_by}"

-    sql = f"UNLOAD ({sql}) " f"TO '{path}' " f"WITH ({unload_parameters})"
+    sql = f"UNLOAD ({sql}) TO '{path}' WITH ({unload_parameters})"
     _logger.debug("Executing unload query: %s", sql)
     try:
         query_id: str = _start_query_execution(
8 changes: 4 additions & 4 deletions awswrangler/athena/_utils.py
@@ -1025,13 +1025,13 @@ def parse_properties(parameters: dict[str, str]) -> str:

     query_parts += [
         """ROW FORMAT SERDE """,
-        f""" '{table_detail['StorageDescriptor']['SerdeInfo']['SerializationLibrary']}' """,
+        f""" '{table_detail["StorageDescriptor"]["SerdeInfo"]["SerializationLibrary"]}' """,
         """STORED AS INPUTFORMAT """,
-        f""" '{table_detail['StorageDescriptor']['InputFormat']}' """,
+        f""" '{table_detail["StorageDescriptor"]["InputFormat"]}' """,
         """OUTPUTFORMAT """,
-        f""" '{table_detail['StorageDescriptor']['OutputFormat']}'""",
+        f""" '{table_detail["StorageDescriptor"]["OutputFormat"]}'""",
         """LOCATION""",
-        f""" '{table_detail['StorageDescriptor']['Location']}'""",
+        f""" '{table_detail["StorageDescriptor"]["Location"]}'""",
         f"""TBLPROPERTIES (\n{tblproperties})""",
     ]
     sql = "\n".join(query_parts)
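
Reviewer note: here the formatter instead normalizes the quotes inside the f-string replacement fields to double quotes. Inside a triple-quoted f-string the inner double quotes do not terminate the literal, so this stays valid on pre-3.12 Pythons. A quick behavior-preservation check, where table_detail is a made-up stand-in for the Glue table description the real code receives:

    # Stand-in for the real Glue table_detail structure.
    table_detail = {"StorageDescriptor": {"Location": "s3://bucket/table/"}}
    old = f""" '{table_detail['StorageDescriptor']['Location']}'"""
    new = f""" '{table_detail["StorageDescriptor"]["Location"]}'"""
    assert old == new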
14 changes: 7 additions & 7 deletions awswrangler/athena/_write_iceberg.py
@@ -309,7 +309,7 @@ def _merge_iceberg(
     if merge_cols:
         if merge_condition == "update":
             match_condition = f"""WHEN MATCHED THEN
-                UPDATE SET {', '.join([f'"{x}" = source."{x}"' for x in df.columns])}"""
+                UPDATE SET {", ".join([f'"{x}" = source."{x}"' for x in df.columns])}"""
         else:
             match_condition = ""

@@ -321,16 +321,16 @@ def _merge_iceberg(
         sql_statement = f"""
             MERGE INTO "{database}"."{table}" target
             USING "{database}"."{source_table}" source
-            ON {' AND '.join(merge_conditions)}
+            ON {" AND ".join(merge_conditions)}
             {match_condition}
             WHEN NOT MATCHED THEN
-                INSERT ({', '.join([f'"{x}"' for x in df.columns])})
-                VALUES ({', '.join([f'source."{x}"' for x in df.columns])})
+                INSERT ({", ".join([f'"{x}"' for x in df.columns])})
+                VALUES ({", ".join([f'source."{x}"' for x in df.columns])})
         """
     else:
         sql_statement = f"""
-            INSERT INTO "{database}"."{table}" ({', '.join([f'"{x}"' for x in df.columns])})
-            SELECT {', '.join([f'"{x}"' for x in df.columns])}
+            INSERT INTO "{database}"."{table}" ({", ".join([f'"{x}"' for x in df.columns])})
+            SELECT {", ".join([f'"{x}"' for x in df.columns])}
             FROM "{database}"."{source_table}"
         """

@@ -763,7 +763,7 @@ def delete_from_iceberg_table(
     sql_statement = f"""
         MERGE INTO "{database}"."{table}" target
         USING "{database}"."{temp_table}" source
-        ON {' AND '.join([f'target."{x}" = source."{x}"' for x in merge_cols])}
+        ON {" AND ".join([f'target."{x}" = source."{x}"' for x in merge_cols])}
         WHEN MATCHED THEN
             DELETE
     """
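
Reviewer note: the ON clauses touched above are assembled by joining one equality predicate per merge column; only the quoting of the ", " and " AND " separators changes in this PR. A minimal sketch with made-up column names:

    # merge_cols is illustrative; the real values come from the caller.
    merge_cols = ["id", "region"]
    on_clause = " AND ".join([f'target."{x}" = source."{x}"' for x in merge_cols])
    print(on_clause)  # target."id" = source."id" AND target."region" = source."region"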
2 changes: 1 addition & 1 deletion awswrangler/dynamodb/_utils.py
@@ -253,7 +253,7 @@ def _remove_dup_pkeys_request_if_any(
             if self._extract_pkey_values(item, overwrite_by_pkeys) == pkey_values_new:
                 self._items_buffer.remove(item)
                 _logger.debug(
-                    "With overwrite_by_pkeys enabled, skipping " "request:%s",
+                    "With overwrite_by_pkeys enabled, skipping request:%s",
                     item,
                 )
16 changes: 8 additions & 8 deletions awswrangler/oracle.py
@@ -48,8 +48,8 @@ def _validate_connection(con: "oracledb.Connection") -> None:


 def _get_table_identifier(schema: str | None, table: str) -> str:
-    schema_str = f'{identifier(schema, sql_mode="ansi")}.' if schema else ""
-    table_identifier = f'{schema_str}{identifier(table, sql_mode="ansi")}'
+    schema_str = f"{identifier(schema, sql_mode='ansi')}." if schema else ""
+    table_identifier = f"{schema_str}{identifier(table, sql_mode='ansi')}"
     return table_identifier

@@ -104,10 +104,10 @@ def _create_table(
         varchar_lengths=varchar_lengths,
         converter_func=_data_types.pyarrow2oracle,
     )
-    cols_str: str = "".join([f'{identifier(k, sql_mode="ansi")} {v},\n' for k, v in oracle_types.items()])[:-2]
+    cols_str: str = "".join([f"{identifier(k, sql_mode='ansi')} {v},\n" for k, v in oracle_types.items()])[:-2]

     if primary_keys:
-        primary_keys_str = ", ".join([f'{identifier(k, sql_mode="ansi")}' for k in primary_keys])
+        primary_keys_str = ", ".join([f"{identifier(k, sql_mode='ansi')}" for k in primary_keys])
     else:
         primary_keys_str = None

@@ -469,17 +469,17 @@ def _generate_upsert_statement(

     non_primary_key_columns = [key for key in df.columns if key not in set(primary_keys)]

-    primary_keys_str = ", ".join([f'{identifier(key, sql_mode="ansi")}' for key in primary_keys])
-    columns_str = ", ".join([f'{identifier(key, sql_mode="ansi")}' for key in non_primary_key_columns])
+    primary_keys_str = ", ".join([f"{identifier(key, sql_mode='ansi')}" for key in primary_keys])
+    columns_str = ", ".join([f"{identifier(key, sql_mode='ansi')}" for key in non_primary_key_columns])

     column_placeholders: str = f"({', '.join([':' + str(i + 1) for i in range(len(df.columns))])})"

     primary_key_condition_str = " AND ".join(
-        [f'{identifier(key, sql_mode="ansi")} = :{i+1}' for i, key in enumerate(primary_keys)]
+        [f"{identifier(key, sql_mode='ansi')} = :{i + 1}" for i, key in enumerate(primary_keys)]
     )
     assignment_str = ", ".join(
         [
-            f'{identifier(col, sql_mode="ansi")} = :{i + len(primary_keys) + 1}'
+            f"{identifier(col, sql_mode='ansi')} = :{i + len(primary_keys) + 1}"
             for i, col in enumerate(non_primary_key_columns)
         ]
     )
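
Reviewer note: the reformatted expressions also gain spaces inside the placeholder arithmetic (:{i+1} becomes :{i + 1}); the generated statement is identical. The numbering puts the primary-key binds first and continues with the update assignments. A sketch with made-up columns, simplifying the module's identifier(..., sql_mode="ansi") helper to plain ANSI double quotes:

    # Illustrative columns; identifier() is approximated with hard-coded quotes.
    primary_keys = ["id"]
    non_primary_key_columns = ["name", "age"]
    condition = " AND ".join(f'"{k}" = :{i + 1}' for i, k in enumerate(primary_keys))
    assignment = ", ".join(
        f'"{c}" = :{i + len(primary_keys) + 1}' for i, c in enumerate(non_primary_key_columns)
    )
    print(condition)   # "id" = :1
    print(assignment)  # "name" = :2, "age" = :3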
11 changes: 4 additions & 7 deletions awswrangler/redshift/_utils.py
@@ -73,7 +73,7 @@ def _begin_transaction(cursor: "redshift_connector.Cursor") -> None:
 def _drop_table(cursor: "redshift_connector.Cursor", schema: str | None, table: str, cascade: bool = False) -> None:
     schema_str = f'"{schema}".' if schema else ""
     cascade_str = " CASCADE" if cascade else ""
-    sql = f'DROP TABLE IF EXISTS {schema_str}"{table}"' f"{cascade_str}"
+    sql = f'DROP TABLE IF EXISTS {schema_str}"{table}"{cascade_str}'
     _logger.debug("Executing drop table query:\n%s", sql)
     cursor.execute(sql)

@@ -130,10 +130,7 @@ def _add_table_columns(
 def _does_table_exist(cursor: "redshift_connector.Cursor", schema: str | None, table: str) -> bool:
     schema_str = f"TABLE_SCHEMA = '{schema}' AND" if schema else ""
     sql = (
-        f"SELECT true WHERE EXISTS ("
-        f"SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE "
-        f"{schema_str} TABLE_NAME = '{table}'"
-        f");"
+        f"SELECT true WHERE EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {schema_str} TABLE_NAME = '{table}');"
     )
     _logger.debug("Executing select query:\n%s", sql)
     cursor.execute(sql)

@@ -236,12 +233,12 @@ def _validate_parameters(
     if sortkey:
         if not isinstance(sortkey, list):
             raise exceptions.InvalidRedshiftSortkey(
-                f"sortkey must be a List of items in the columns list: {cols}. " f"Currently value: {sortkey}"
+                f"sortkey must be a List of items in the columns list: {cols}. Currently value: {sortkey}"
             )
         for key in sortkey:
             if key not in cols:
                 raise exceptions.InvalidRedshiftSortkey(
-                    f"sortkey must be a List of items in the columns list: {cols}. " f"Currently value: {key}"
+                    f"sortkey must be a List of items in the columns list: {cols}. Currently value: {key}"
                 )
     if primary_keys:
         if not isinstance(primary_keys, list):
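
Reviewer note: in _does_table_exist the four fragments collapse into one literal. Rendered with sample values (not from this PR), the query reads:

    # Sample schema/table, just to show the rendered query.
    schema, table = "public", "my_table"
    schema_str = f"TABLE_SCHEMA = '{schema}' AND" if schema else ""
    sql = f"SELECT true WHERE EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {schema_str} TABLE_NAME = '{table}');"
    print(sql)
    # SELECT true WHERE EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = 'public' AND TABLE_NAME = 'my_table');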
3 changes: 1 addition & 2 deletions awswrangler/sqlserver.py
@@ -536,8 +536,7 @@ def to_sql(
         sql = f"MERGE INTO {table_identifier}\nUSING (VALUES {placeholders}) AS source ({quoted_columns})\n"
         sql += f"ON {' AND '.join(f'{table_identifier}.{col}=source.{col}' for col in merge_on_columns)}\n"
         sql += (
-            f"WHEN MATCHED THEN\n UPDATE "
-            f"SET {', '.join(f'{col}=source.{col}' for col in column_names)}\n"
+            f"WHEN MATCHED THEN\n UPDATE SET {', '.join(f'{col}=source.{col}' for col in column_names)}\n"
         )
         sql += (
             f"WHEN NOT MATCHED THEN\n INSERT "
2 changes: 1 addition & 1 deletion awswrangler/timestream/_read.py
@@ -409,7 +409,7 @@ def unload_to_files(
     timestream_client = _utils.client(service_name="timestream-query", session=boto3_session)

     partitioned_by_str: str = (
-        f"""partitioned_by = ARRAY [{','.join([f"'{col}'" for col in partition_cols])}],\n"""
+        f"""partitioned_by = ARRAY [{",".join([f"'{col}'" for col in partition_cols])}],\n"""
         if partition_cols is not None
         else ""
     )
289 changes: 152 additions & 137 deletions poetry.lock

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions pyproject.toml
@@ -100,13 +100,13 @@ ray = ["ray"]
 setuptools = "*"
 wheel = "^0.45.1"
 msgpack = "*"
-poetry = "^1.8.5"
+poetry = "^2.0.1"

 # Lint
-boto3-stubs = {version = "^1.35.90", extras = ["athena", "cleanrooms", "chime", "cloudwatch", "dynamodb", "ec2", "emr", "emr-serverless", "glue", "kms", "logs", "neptune", "opensearch", "opensearchserverless", "quicksight", "rds", "rds-data", "redshift", "redshift-data", "s3", "secretsmanager", "ssm", "sts", "timestream-query", "timestream-write"]}
+boto3-stubs = {version = "^1.35.97", extras = ["athena", "cleanrooms", "chime", "cloudwatch", "dynamodb", "ec2", "emr", "emr-serverless", "glue", "kms", "logs", "neptune", "opensearch", "opensearchserverless", "quicksight", "rds", "rds-data", "redshift", "redshift-data", "s3", "secretsmanager", "ssm", "sts", "timestream-query", "timestream-write"]}
 doc8 = "^1.1"
 mypy = "^1.14"
-ruff = "^0.8.4"
+ruff = "^0.9.1"

 # Test
 moto = "^5.0"
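
Reviewer note: the ruff bump from ^0.8.4 to ^0.9.1 is what drives the mechanical reformatting across this PR; ruff 0.9 stabilized a formatter style that joins implicit string concatenations and formats expressions inside f-strings. A sketch of reproducing the diff locally, assuming ruff >= 0.9 is installed and this repository's layout:

    # Re-format the trees this PR touches, then verify the result is stable.
    import subprocess

    subprocess.run(["ruff", "format", "awswrangler", "tests", "tutorials"], check=True)
    # A second --check pass should be clean, since formatting is idempotent.
    subprocess.run(["ruff", "format", "--check", "awswrangler", "tests", "tutorials"], check=True)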
19 changes: 5 additions & 14 deletions tests/unit/test_mysql.py
@@ -211,7 +211,7 @@ def test_connect_secret_manager(dbname):

 def test_insert_with_column_names(mysql_table, mysql_con):
     create_table_sql = (
-        f"CREATE TABLE test.{mysql_table} " "(c0 varchar(100) NULL, " "c1 INT DEFAULT 42 NULL, " "c2 INT NOT NULL);"
+        f"CREATE TABLE test.{mysql_table} (c0 varchar(100) NULL, c1 INT DEFAULT 42 NULL, c2 INT NOT NULL);"
     )
     with mysql_con.cursor() as cursor:
         cursor.execute(create_table_sql)

@@ -236,7 +236,7 @@ def test_insert_with_column_names(mysql_table, mysql_con):

 def test_upsert_distinct(mysql_table, mysql_con):
     create_table_sql = (
-        f"CREATE TABLE test.{mysql_table} " "(c0 varchar(100) NULL, " "c1 INT DEFAULT 42 NULL, " "c2 INT NOT NULL);"
+        f"CREATE TABLE test.{mysql_table} (c0 varchar(100) NULL, c1 INT DEFAULT 42 NULL, c2 INT NOT NULL);"
     )
     with mysql_con.cursor() as cursor:
         cursor.execute(create_table_sql)

@@ -276,10 +276,7 @@ def test_upsert_distinct(mysql_table, mysql_con):

 def test_upsert_duplicate_key(mysql_table, mysql_con):
     create_table_sql = (
-        f"CREATE TABLE test.{mysql_table} "
-        "(c0 varchar(100) PRIMARY KEY, "
-        "c1 INT DEFAULT 42 NULL, "
-        "c2 INT NOT NULL);"
+        f"CREATE TABLE test.{mysql_table} (c0 varchar(100) PRIMARY KEY, c1 INT DEFAULT 42 NULL, c2 INT NOT NULL);"
     )
     with mysql_con.cursor() as cursor:
         cursor.execute(create_table_sql)

@@ -319,10 +316,7 @@ def test_upsert_duplicate_key(mysql_table, mysql_con):

 def test_upsert_replace(mysql_table, mysql_con):
     create_table_sql = (
-        f"CREATE TABLE test.{mysql_table} "
-        "(c0 varchar(100) PRIMARY KEY, "
-        "c1 INT DEFAULT 42 NULL, "
-        "c2 INT NOT NULL);"
+        f"CREATE TABLE test.{mysql_table} (c0 varchar(100) PRIMARY KEY, c1 INT DEFAULT 42 NULL, c2 INT NOT NULL);"
     )
     with mysql_con.cursor() as cursor:
         cursor.execute(create_table_sql)

@@ -375,10 +369,7 @@ def test_dfs_are_equal_for_different_chunksizes(mysql_table, mysql_con, chunksize):

 def test_ignore(mysql_table, mysql_con):
     create_table_sql = (
-        f"CREATE TABLE test.{mysql_table} "
-        "(c0 varchar(100) PRIMARY KEY, "
-        "c1 INT DEFAULT 42 NULL, "
-        "c2 INT NOT NULL);"
+        f"CREATE TABLE test.{mysql_table} (c0 varchar(100) PRIMARY KEY, c1 INT DEFAULT 42 NULL, c2 INT NOT NULL);"
     )
     with mysql_con.cursor() as cursor:
         cursor.execute(create_table_sql)
2 changes: 1 addition & 1 deletion tests/unit/test_postgresql.py
@@ -239,7 +239,7 @@ def test_connect_secret_manager(dbname):

 def test_insert_with_column_names(postgresql_table, postgresql_con):
     create_table_sql = (
-        f"CREATE TABLE public.{postgresql_table} " "(c0 varchar NULL," "c1 int NULL DEFAULT 42," "c2 int NOT NULL);"
+        f"CREATE TABLE public.{postgresql_table} (c0 varchar NULL,c1 int NULL DEFAULT 42,c2 int NOT NULL);"
     )
     with postgresql_con.cursor() as cursor:
         cursor.execute(create_table_sql)
2 changes: 1 addition & 1 deletion tests/unit/test_redshift.py
@@ -1219,7 +1219,7 @@ def test_failed_keep_files(

 def test_insert_with_column_names(redshift_table: str, redshift_con: redshift_connector.Connection) -> None:
     create_table_sql = (
-        f"CREATE TABLE public.{redshift_table} " "(c0 varchar(100), " "c1 integer default 42, " "c2 integer not null);"
+        f"CREATE TABLE public.{redshift_table} (c0 varchar(100), c1 integer default 42, c2 integer not null);"
     )
     with redshift_con.cursor() as cursor:
         cursor.execute(create_table_sql)
2 changes: 1 addition & 1 deletion tests/unit/test_sqlserver.py
@@ -222,7 +222,7 @@ def test_connect_secret_manager(dbname):

 def test_insert_with_column_names(sqlserver_table, sqlserver_con):
     create_table_sql = (
-        f"CREATE TABLE dbo.{sqlserver_table} " "(c0 varchar(100) NULL," "c1 INT DEFAULT 42 NULL," "c2 INT NOT NULL);"
+        f"CREATE TABLE dbo.{sqlserver_table} (c0 varchar(100) NULL,c1 INT DEFAULT 42 NULL,c2 INT NOT NULL);"
     )
     with sqlserver_con.cursor() as cursor:
         cursor.execute(create_table_sql)
4 changes: 2 additions & 2 deletions tutorials/003 - Amazon S3.ipynb
@@ -874,10 +874,10 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "content = \"1 Herfelingen 27-12-18\\n\" \"2 Lambusart 14-06-18\\n\" \"3 Spormaggiore 15-04-18\"\n",
+    "content = \"1 Herfelingen 27-12-18\\n2 Lambusart 14-06-18\\n3 Spormaggiore 15-04-18\"\n",
     "boto3.client(\"s3\").put_object(Body=content, Bucket=bucket, Key=\"fwf/file1.txt\")\n",
     "\n",
-    "content = \"4 Buizingen 05-09-19\\n\" \"5 San Rafael 04-09-19\"\n",
+    "content = \"4 Buizingen 05-09-19\\n5 San Rafael 04-09-19\"\n",
     "boto3.client(\"s3\").put_object(Body=content, Bucket=bucket, Key=\"fwf/file2.txt\")\n",
     "\n",
     "path1 = f\"s3://{bucket}/fwf/file1.txt\"\n",
10 changes: 5 additions & 5 deletions tutorials/024 - Athena Query Metadata.ipynb
@@ -127,11 +127,11 @@
    }
   ],
   "source": [
-    "print(f'DataScannedInBytes: {df.query_metadata[\"Statistics\"][\"DataScannedInBytes\"]}')\n",
-    "print(f'TotalExecutionTimeInMillis: {df.query_metadata[\"Statistics\"][\"TotalExecutionTimeInMillis\"]}')\n",
-    "print(f'QueryQueueTimeInMillis: {df.query_metadata[\"Statistics\"][\"QueryQueueTimeInMillis\"]}')\n",
-    "print(f'QueryPlanningTimeInMillis: {df.query_metadata[\"Statistics\"][\"QueryPlanningTimeInMillis\"]}')\n",
-    "print(f'ServiceProcessingTimeInMillis: {df.query_metadata[\"Statistics\"][\"ServiceProcessingTimeInMillis\"]}')"
+    "print(f\"DataScannedInBytes: {df.query_metadata['Statistics']['DataScannedInBytes']}\")\n",
+    "print(f\"TotalExecutionTimeInMillis: {df.query_metadata['Statistics']['TotalExecutionTimeInMillis']}\")\n",
+    "print(f\"QueryQueueTimeInMillis: {df.query_metadata['Statistics']['QueryQueueTimeInMillis']}\")\n",
+    "print(f\"QueryPlanningTimeInMillis: {df.query_metadata['Statistics']['QueryPlanningTimeInMillis']}\")\n",
+    "print(f\"ServiceProcessingTimeInMillis: {df.query_metadata['Statistics']['ServiceProcessingTimeInMillis']}\")"
   ]
  }
 ],