Skip to content

Commit 7e3e508

Browse files
authored
Minor fixes, #523 followup (#563)
1 parent 8143df6 commit 7e3e508

File tree

4 files changed: +4 −8 lines changed

4 files changed: +4 −8 lines changed

pyiceberg/table/__init__.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1131,8 +1131,9 @@ def append(self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT)
11311131

11321132
_check_schema_compatible(self.schema(), other_schema=df.schema)
11331133
# cast if the two schemas are compatible but not equal
1134-
if self.schema().as_arrow() != df.schema:
1135-
df = df.cast(self.schema().as_arrow())
1134+
table_arrow_schema = self.schema().as_arrow()
1135+
if table_arrow_schema != df.schema:
1136+
df = df.cast(table_arrow_schema)
11361137

11371138
with self.transaction() as txn:
11381139
with txn.update_snapshot(snapshot_properties=snapshot_properties).fast_append() as update_snapshot:

tests/catalog/test_sql.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -197,7 +197,7 @@ def test_create_table_with_pyarrow_schema(
197197
'catalog',
198198
[
199199
lazy_fixture('catalog_memory'),
200-
# lazy_fixture('catalog_sqlite'),
200+
lazy_fixture('catalog_sqlite'),
201201
],
202202
)
203203
def test_write_pyarrow_schema(catalog: SqlCatalog, random_identifier: Identifier) -> None:
@@ -220,9 +220,6 @@ def test_write_pyarrow_schema(catalog: SqlCatalog, random_identifier: Identifier
220220
database_name, _table_name = random_identifier
221221
catalog.create_namespace(database_name)
222222
table = catalog.create_table(random_identifier, pyarrow_table.schema)
223-
print(pyarrow_table.schema)
224-
print(table.schema().as_struct())
225-
print()
226223
table.overwrite(pyarrow_table)
227224

228225

tests/integration/test_reads.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -274,7 +274,6 @@ def test_ray_nan_rewritten(catalog: Catalog) -> None:
274274
def test_ray_not_nan_count(catalog: Catalog) -> None:
275275
table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten")
276276
ray_dataset = table_test_null_nan_rewritten.scan(row_filter=NotNaN("col_numeric"), selected_fields=("idx",)).to_ray()
277-
print(ray_dataset.take())
278277
assert ray_dataset.count() == 2
279278

280279

tests/integration/test_writes.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -480,7 +480,6 @@ def test_write_parquet_other_properties(
480480
properties: Dict[str, Any],
481481
expected_kwargs: Dict[str, Any],
482482
) -> None:
483-
print(type(mocker))
484483
identifier = "default.test_write_parquet_other_properties"
485484

486485
# The properties we test cannot be checked on the resulting Parquet file, so we spy on the ParquetWriter call instead

Comments (0)