@@ -1052,10 +1052,12 @@ def append(self, df: pa.Table) -> None:
         if len(self.spec().fields) > 0:
             raise ValueError("Cannot write to partitioned tables")

+        from pyiceberg.io.pyarrow import schema_to_pyarrow
+
         _check_schema_compatible(self.schema(), other_schema=df.schema)
         # cast if the two schemas are compatible but not equal
-        if self.schema().as_arrow() != df.schema:
-            df = df.cast(self.schema().as_arrow())
+        if schema_to_pyarrow(self.schema()) != df.schema:
+            df = df.cast(schema_to_pyarrow(self.schema()))

         merge = _MergingSnapshotProducer(operation=Operation.APPEND, table=self)

@@ -1090,10 +1092,12 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_TRUE) -> None:
         if len(self.spec().fields) > 0:
             raise ValueError("Cannot write to partitioned tables")

+        from pyiceberg.io.pyarrow import schema_to_pyarrow
+
         _check_schema_compatible(self.schema(), other_schema=df.schema)
         # cast if the two schemas are compatible but not equal
-        if self.schema().as_arrow() != df.schema:
-            df = df.cast(self.schema().as_arrow())
+        if schema_to_pyarrow(self.schema()) != df.schema:
+            df = df.cast(schema_to_pyarrow(self.schema()))

         merge = _MergingSnapshotProducer(
             operation=Operation.OVERWRITE if self.current_snapshot() is not None else Operation.APPEND,
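Both hunks make the same change to append and overwrite: the Iceberg schema is converted to an Arrow schema with schema_to_pyarrow (imported inside the method body, presumably to avoid a circular import between pyiceberg.table and pyiceberg.io.pyarrow) in place of the former Schema.as_arrow() call, and the incoming table is cast whenever the two schemas are compatible but not identical. Below is a minimal sketch of that cast step in isolation, outside any Table method; the schema and data are hypothetical and only illustrate the compatible-but-not-equal case.

```python
# A minimal sketch of the cast step, assuming pyiceberg and pyarrow are
# installed. The field names and values are hypothetical.
import pyarrow as pa

from pyiceberg.io.pyarrow import schema_to_pyarrow
from pyiceberg.schema import Schema
from pyiceberg.types import LongType, NestedField, StringType

# Iceberg table schema: two optional fields.
iceberg_schema = Schema(
    NestedField(1, "id", LongType(), required=False),
    NestedField(2, "name", StringType(), required=False),
)

# An Arrow table that is compatible but not equal: "id" arrives as int32,
# while the Iceberg schema maps long to Arrow int64.
df = pa.table({"id": pa.array([1, 2], type=pa.int32()), "name": ["a", "b"]})

arrow_schema = schema_to_pyarrow(iceberg_schema)
if arrow_schema != df.schema:
    # Table.cast re-encodes the columns so the table carries exactly the
    # Arrow schema derived from the Iceberg schema (here, id -> int64).
    df = df.cast(arrow_schema)
```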