@@ -365,7 +365,7 @@ impl PySessionContext {
                 table_partition_cols
                     .into_iter()
                     .map(|(name, ty)| (name, ty.0))
-                    .collect::<Vec<(String, DataType)>>()
+                    .collect::<Vec<(String, DataType)>>(),
             )
             .with_file_sort_order(
                 file_sort_order
@@ -647,7 +647,7 @@ impl PySessionContext {
                 table_partition_cols
                     .into_iter()
                     .map(|(name, ty)| (name, ty.0))
-                    .collect::<Vec<(String, DataType)>>()
+                    .collect::<Vec<(String, DataType)>>(),
             )
             .parquet_pruning(parquet_pruning)
             .skip_metadata(skip_metadata);
@@ -742,7 +742,7 @@ impl PySessionContext {
                 table_partition_cols
                     .into_iter()
                     .map(|(name, ty)| (name, ty.0))
-                    .collect::<Vec<(String, DataType)>>()
+                    .collect::<Vec<(String, DataType)>>(),
             );
         options.schema_infer_max_records = schema_infer_max_records;
         options.file_extension = file_extension;
@@ -773,13 +773,12 @@ impl PySessionContext {
             .to_str()
             .ok_or_else(|| PyValueError::new_err("Unable to convert path to a string"))?;

-        let mut options = AvroReadOptions::default()
-            .table_partition_cols(
-                table_partition_cols
-                    .into_iter()
-                    .map(|(name, ty)| (name, ty.0))
-                    .collect::<Vec<(String, DataType)>>()
-            );
+        let mut options = AvroReadOptions::default().table_partition_cols(
+            table_partition_cols
+                .into_iter()
+                .map(|(name, ty)| (name, ty.0))
+                .collect::<Vec<(String, DataType)>>(),
+        );
         options.file_extension = file_extension;
         options.schema = schema.as_ref().map(|x| &x.0);

@@ -882,7 +881,7 @@ impl PySessionContext {
                 table_partition_cols
                     .into_iter()
                     .map(|(name, ty)| (name, ty.0))
-                    .collect::<Vec<(String, DataType)>>()
+                    .collect::<Vec<(String, DataType)>>(),
             )
             .file_compression_type(parse_file_compression_type(file_compression_type)?);
         options.schema_infer_max_records = schema_infer_max_records;
@@ -936,7 +935,7 @@ impl PySessionContext {
                 table_partition_cols
                     .into_iter()
                     .map(|(name, ty)| (name, ty.0))
-                    .collect::<Vec<(String, DataType)>>()
+                    .collect::<Vec<(String, DataType)>>(),
             )
             .file_compression_type(parse_file_compression_type(file_compression_type)?);
         options.schema = schema.as_ref().map(|x| &x.0);
@@ -980,7 +979,7 @@ impl PySessionContext {
                 table_partition_cols
                     .into_iter()
                     .map(|(name, ty)| (name, ty.0))
-                    .collect::<Vec<(String, DataType)>>()
+                    .collect::<Vec<(String, DataType)>>(),
             )
             .parquet_pruning(parquet_pruning)
             .skip_metadata(skip_metadata);
@@ -1007,13 +1006,12 @@ impl PySessionContext {
         file_extension: &str,
         py: Python,
     ) -> PyDataFusionResult<PyDataFrame> {
-        let mut options = AvroReadOptions::default()
-            .table_partition_cols(
-                table_partition_cols
-                    .into_iter()
-                    .map(|(name, ty)| (name, ty.0))
-                    .collect::<Vec<(String, DataType)>>()
-            );
+        let mut options = AvroReadOptions::default().table_partition_cols(
+            table_partition_cols
+                .into_iter()
+                .map(|(name, ty)| (name, ty.0))
+                .collect::<Vec<(String, DataType)>>(),
+        );
         options.file_extension = file_extension;
         let df = if let Some(schema) = schema {
             options.schema = Some(&schema.0);
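For context, every hunk above is formatting-only: the collected Vec<(String, DataType)> gains a trailing comma, and the two AvroReadOptions sites are reflowed so the builder call matches rustfmt's multi-line argument style; behavior is unchanged. Below is a minimal standalone sketch of that call shape, assuming DataFusion's AvroReadOptions API (avro feature enabled); build_avro_options and PyDataTypeStub are illustrative names standing in for the PR's PyO3 wrapper types, not part of the diff.

// A minimal sketch (not part of the PR) of the builder call these hunks reformat.
// `PyDataTypeStub` stands in for the PyO3 wrapper whose `.0` field holds the
// Arrow `DataType` in the real code.
use datafusion::arrow::datatypes::DataType;
use datafusion::prelude::AvroReadOptions;

struct PyDataTypeStub(DataType);

fn build_avro_options(
    table_partition_cols: Vec<(String, PyDataTypeStub)>,
    file_extension: &str,
) -> AvroReadOptions<'_> {
    // The trailing comma after the collected Vec is the only textual change in
    // most hunks; rustfmt adds it when a call argument spans several lines.
    let mut options = AvroReadOptions::default().table_partition_cols(
        table_partition_cols
            .into_iter()
            .map(|(name, ty)| (name, ty.0))
            .collect::<Vec<(String, DataType)>>(),
    );
    options.file_extension = file_extension;
    options
}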