Skip to content

Commit 276dc6a

Browse files
authored
chore: apply cargo fmt with import organization (#1303)
* Apply nightly format to organize imports consistently * Set matrix for cargo fmt * Revert "Set matrix for cargo fmt" This reverts commit 8511905. * Instead of creating a large matrix just add one workflow for nightly fmt * Intentionally cause cargo fmt to fail in nightly * Apply nightly fmt
1 parent f1b3029 commit 276dc6a

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

78 files changed

+455
-387
lines changed

.github/workflows/test.yaml

Lines changed: 16 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -79,10 +79,6 @@ jobs:
7979
path: ~/.cargo
8080
key: cargo-cache-${{ steps.rust-toolchain.outputs.cachekey }}-${{ hashFiles('Cargo.lock') }}
8181

82-
- name: Check Formatting
83-
if: ${{ matrix.python-version == '3.10' && matrix.toolchain == 'stable' }}
84-
run: cargo fmt -- --check
85-
8682
- name: Run Clippy
8783
if: ${{ matrix.python-version == '3.10' && matrix.toolchain == 'stable' }}
8884
run: cargo clippy --all-targets --all-features -- -D clippy::all -D warnings -A clippy::redundant_closure
@@ -125,3 +121,19 @@ jobs:
125121
cd examples/tpch
126122
uv run --no-project python convert_data_to_parquet.py
127123
uv run --no-project pytest _tests.py
124+
125+
nightly-fmt:
126+
runs-on: ubuntu-latest
127+
128+
steps:
129+
- uses: actions/checkout@v5
130+
131+
- name: Setup Rust Toolchain
132+
uses: dtolnay/rust-toolchain@stable
133+
id: rust-toolchain
134+
with:
135+
toolchain: "nightly"
136+
components: clippy,rustfmt
137+
138+
- name: Check Formatting
139+
run: cargo +nightly fmt -- --check

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -33,7 +33,7 @@ repos:
3333
- id: rust-fmt
3434
name: Rust fmt
3535
description: Run cargo fmt on files included in the commit. rustfmt should be installed before-hand.
36-
entry: cargo fmt --all --
36+
entry: cargo +nightly fmt --all --
3737
pass_filenames: true
3838
types: [file, rust]
3939
language: system

ci/scripts/rust_fmt.sh

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -18,4 +18,4 @@
1818
# under the License.
1919

2020
set -ex
21-
cargo fmt --all -- --check
21+
cargo +nightly fmt --all -- --check

rustfmt.toml

Lines changed: 19 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,19 @@
1+
# Licensed to the Apache Software Foundation (ASF) under one
2+
# or more contributor license agreements. See the NOTICE file
3+
# distributed with this work for additional information
4+
# regarding copyright ownership. The ASF licenses this file
5+
# to you under the Apache License, Version 2.0 (the
6+
# "License"); you may not use this file except in compliance
7+
# with the License. You may obtain a copy of the License at
8+
#
9+
# http://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing,
12+
# software distributed under the License is distributed on an
13+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14+
# KIND, either express or implied. See the License for the
15+
# specific language governing permissions and limitations
16+
# under the License.
17+
18+
group_imports = "StdExternalCrate"
19+
imports_granularity = "Module"

src/catalog.rs

Lines changed: 13 additions & 12 deletions
Original file line number · Diff line number · Diff line change
@@ -15,25 +15,26 @@
1515
// specific language governing permissions and limitations
1616
// under the License.
1717

18-
use crate::dataset::Dataset;
19-
use crate::errors::{py_datafusion_err, to_datafusion_err, PyDataFusionError, PyDataFusionResult};
20-
use crate::table::PyTable;
21-
use crate::utils::{validate_pycapsule, wait_for_future};
18+
use std::any::Any;
19+
use std::collections::HashSet;
20+
use std::sync::Arc;
21+
2222
use async_trait::async_trait;
23-
use datafusion::catalog::{MemoryCatalogProvider, MemorySchemaProvider};
24-
use datafusion::common::DataFusionError;
25-
use datafusion::{
26-
catalog::{CatalogProvider, SchemaProvider},
27-
datasource::TableProvider,
23+
use datafusion::catalog::{
24+
CatalogProvider, MemoryCatalogProvider, MemorySchemaProvider, SchemaProvider,
2825
};
26+
use datafusion::common::DataFusionError;
27+
use datafusion::datasource::TableProvider;
2928
use datafusion_ffi::schema_provider::{FFI_SchemaProvider, ForeignSchemaProvider};
3029
use pyo3::exceptions::PyKeyError;
3130
use pyo3::prelude::*;
3231
use pyo3::types::PyCapsule;
3332
use pyo3::IntoPyObjectExt;
34-
use std::any::Any;
35-
use std::collections::HashSet;
36-
use std::sync::Arc;
33+
34+
use crate::dataset::Dataset;
35+
use crate::errors::{py_datafusion_err, to_datafusion_err, PyDataFusionError, PyDataFusionResult};
36+
use crate::table::PyTable;
37+
use crate::utils::{validate_pycapsule, wait_for_future};
3738

3839
#[pyclass(frozen, name = "RawCatalog", module = "datafusion.catalog", subclass)]
3940
#[derive(Clone)]

src/common/data_type.rs

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -19,8 +19,8 @@ use datafusion::arrow::array::Array;
1919
use datafusion::arrow::datatypes::{DataType, IntervalUnit, TimeUnit};
2020
use datafusion::common::ScalarValue;
2121
use datafusion::logical_expr::sqlparser::ast::NullTreatment as DFNullTreatment;
22-
use pyo3::exceptions::PyNotImplementedError;
23-
use pyo3::{exceptions::PyValueError, prelude::*};
22+
use pyo3::exceptions::{PyNotImplementedError, PyValueError};
23+
use pyo3::prelude::*;
2424

2525
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd)]
2626
pub struct PyScalarValue(pub ScalarValue);

src/common/schema.rs

Lines changed: 6 additions & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -15,26 +15,25 @@
1515
// specific language governing permissions and limitations
1616
// under the License.
1717

18+
use std::any::Any;
19+
use std::borrow::Cow;
1820
use std::fmt::{self, Display, Formatter};
1921
use std::sync::Arc;
20-
use std::{any::Any, borrow::Cow};
2122

2223
use arrow::datatypes::Schema;
2324
use arrow::pyarrow::PyArrowType;
2425
use datafusion::arrow::datatypes::SchemaRef;
2526
use datafusion::common::Constraints;
2627
use datafusion::datasource::TableType;
28+
use datafusion::logical_expr::utils::split_conjunction;
2729
use datafusion::logical_expr::{Expr, TableProviderFilterPushDown, TableSource};
30+
use parking_lot::RwLock;
2831
use pyo3::prelude::*;
2932

30-
use datafusion::logical_expr::utils::split_conjunction;
31-
33+
use super::data_type::DataTypeMap;
34+
use super::function::SqlFunction;
3235
use crate::sql::logical::PyLogicalPlan;
3336

34-
use super::{data_type::DataTypeMap, function::SqlFunction};
35-
36-
use parking_lot::RwLock;
37-
3837
#[pyclass(name = "SqlSchema", module = "datafusion.common", subclass, frozen)]
3938
#[derive(Debug, Clone)]
4039
pub struct SqlSchema {

src/config.rs

Lines changed: 2 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -17,14 +17,13 @@
1717

1818
use std::sync::Arc;
1919

20+
use datafusion::config::ConfigOptions;
21+
use parking_lot::RwLock;
2022
use pyo3::prelude::*;
2123
use pyo3::types::*;
2224

23-
use datafusion::config::ConfigOptions;
24-
2525
use crate::errors::PyDataFusionResult;
2626
use crate::utils::py_obj_to_scalar_value;
27-
use parking_lot::RwLock;
2827
#[pyclass(name = "Config", module = "datafusion", subclass, frozen)]
2928
#[derive(Clone)]
3029
pub(crate) struct PyConfig {

src/context.rs

Lines changed: 25 additions & 28 deletions
Original file line number · Diff line number · Diff line change
@@ -23,56 +23,53 @@ use std::sync::Arc;
2323
use arrow::array::RecordBatchReader;
2424
use arrow::ffi_stream::ArrowArrayStreamReader;
2525
use arrow::pyarrow::FromPyArrow;
26-
use datafusion::execution::session_state::SessionStateBuilder;
27-
use object_store::ObjectStore;
28-
use url::Url;
29-
use uuid::Uuid;
30-
31-
use pyo3::exceptions::{PyKeyError, PyValueError};
32-
use pyo3::prelude::*;
33-
34-
use crate::catalog::{PyCatalog, RustWrappedPyCatalogProvider};
35-
use crate::dataframe::PyDataFrame;
36-
use crate::dataset::Dataset;
37-
use crate::errors::{py_datafusion_err, PyDataFusionResult};
38-
use crate::expr::sort_expr::PySortExpr;
39-
use crate::physical_plan::PyExecutionPlan;
40-
use crate::record_batch::PyRecordBatchStream;
41-
use crate::sql::exceptions::py_value_err;
42-
use crate::sql::logical::PyLogicalPlan;
43-
use crate::store::StorageContexts;
44-
use crate::table::PyTable;
45-
use crate::udaf::PyAggregateUDF;
46-
use crate::udf::PyScalarUDF;
47-
use crate::udtf::PyTableFunction;
48-
use crate::udwf::PyWindowUDF;
49-
use crate::utils::{get_global_ctx, spawn_future, validate_pycapsule, wait_for_future};
5026
use datafusion::arrow::datatypes::{DataType, Schema, SchemaRef};
5127
use datafusion::arrow::pyarrow::PyArrowType;
5228
use datafusion::arrow::record_batch::RecordBatch;
5329
use datafusion::catalog::CatalogProvider;
54-
use datafusion::common::TableReference;
55-
use datafusion::common::{exec_err, ScalarValue};
30+
use datafusion::common::{exec_err, ScalarValue, TableReference};
5631
use datafusion::datasource::file_format::file_compression_type::FileCompressionType;
5732
use datafusion::datasource::file_format::parquet::ParquetFormat;
5833
use datafusion::datasource::listing::{
5934
ListingOptions, ListingTable, ListingTableConfig, ListingTableUrl,
6035
};
61-
use datafusion::datasource::MemTable;
62-
use datafusion::datasource::TableProvider;
36+
use datafusion::datasource::{MemTable, TableProvider};
6337
use datafusion::execution::context::{
6438
DataFilePaths, SQLOptions, SessionConfig, SessionContext, TaskContext,
6539
};
6640
use datafusion::execution::disk_manager::DiskManagerMode;
6741
use datafusion::execution::memory_pool::{FairSpillPool, GreedyMemoryPool, UnboundedMemoryPool};
6842
use datafusion::execution::options::ReadOptions;
6943
use datafusion::execution::runtime_env::RuntimeEnvBuilder;
44+
use datafusion::execution::session_state::SessionStateBuilder;
7045
use datafusion::prelude::{
7146
AvroReadOptions, CsvReadOptions, DataFrame, NdJsonReadOptions, ParquetReadOptions,
7247
};
7348
use datafusion_ffi::catalog_provider::{FFI_CatalogProvider, ForeignCatalogProvider};
49+
use object_store::ObjectStore;
50+
use pyo3::exceptions::{PyKeyError, PyValueError};
51+
use pyo3::prelude::*;
7452
use pyo3::types::{PyCapsule, PyDict, PyList, PyTuple, PyType};
7553
use pyo3::IntoPyObjectExt;
54+
use url::Url;
55+
use uuid::Uuid;
56+
57+
use crate::catalog::{PyCatalog, RustWrappedPyCatalogProvider};
58+
use crate::dataframe::PyDataFrame;
59+
use crate::dataset::Dataset;
60+
use crate::errors::{py_datafusion_err, PyDataFusionResult};
61+
use crate::expr::sort_expr::PySortExpr;
62+
use crate::physical_plan::PyExecutionPlan;
63+
use crate::record_batch::PyRecordBatchStream;
64+
use crate::sql::exceptions::py_value_err;
65+
use crate::sql::logical::PyLogicalPlan;
66+
use crate::store::StorageContexts;
67+
use crate::table::PyTable;
68+
use crate::udaf::PyAggregateUDF;
69+
use crate::udf::PyScalarUDF;
70+
use crate::udtf::PyTableFunction;
71+
use crate::udwf::PyWindowUDF;
72+
use crate::utils::{get_global_ctx, spawn_future, validate_pycapsule, wait_for_future};
7673

7774
/// Configuration options for a SessionContext
7875
#[pyclass(frozen, name = "SessionConfig", module = "datafusion", subclass)]

src/dataframe.rs

Lines changed: 5 additions & 9 deletions
Original file line number · Diff line number · Diff line change
@@ -15,7 +15,6 @@
1515
// specific language governing permissions and limitations
1616
// under the License.
1717

18-
use cstr::cstr;
1918
use std::collections::HashMap;
2019
use std::ffi::{CStr, CString};
2120
use std::sync::Arc;
@@ -26,6 +25,7 @@ use arrow::error::ArrowError;
2625
use arrow::ffi::FFI_ArrowSchema;
2726
use arrow::ffi_stream::FFI_ArrowArrayStream;
2827
use arrow::pyarrow::FromPyArrow;
28+
use cstr::cstr;
2929
use datafusion::arrow::datatypes::{Schema, SchemaRef};
3030
use datafusion::arrow::pyarrow::{PyArrowType, ToPyArrow};
3131
use datafusion::arrow::util::pretty;
@@ -40,27 +40,23 @@ use datafusion::logical_expr::SortExpr;
4040
use datafusion::parquet::basic::{BrotliLevel, Compression, GzipLevel, ZstdLevel};
4141
use datafusion::prelude::*;
4242
use futures::{StreamExt, TryStreamExt};
43+
use parking_lot::Mutex;
4344
use pyo3::exceptions::PyValueError;
4445
use pyo3::prelude::*;
4546
use pyo3::pybacked::PyBackedStr;
4647
use pyo3::types::{PyCapsule, PyList, PyTuple, PyTupleMethods};
4748
use pyo3::PyErr;
4849

49-
use crate::errors::{py_datafusion_err, PyDataFusionError};
50-
use crate::expr::sort_expr::to_sort_expressions;
50+
use crate::errors::{py_datafusion_err, PyDataFusionError, PyDataFusionResult};
51+
use crate::expr::sort_expr::{to_sort_expressions, PySortExpr};
52+
use crate::expr::PyExpr;
5153
use crate::physical_plan::PyExecutionPlan;
5254
use crate::record_batch::{poll_next_batch, PyRecordBatchStream};
5355
use crate::sql::logical::PyLogicalPlan;
5456
use crate::table::{PyTable, TempViewTable};
5557
use crate::utils::{
5658
is_ipython_env, py_obj_to_scalar_value, spawn_future, validate_pycapsule, wait_for_future,
5759
};
58-
use crate::{
59-
errors::PyDataFusionResult,
60-
expr::{sort_expr::PySortExpr, PyExpr},
61-
};
62-
63-
use parking_lot::Mutex;
6460

6561
/// File-level static CStr for the Arrow array stream capsule name.
6662
static ARROW_ARRAY_STREAM_NAME: &CStr = cstr!("arrow_array_stream");

0 commit comments

Comments (0)