Skip to content

Commit 703b10d

Browse files
findepi, Dandandan, comphead
authored
Apply clippy fixes for Rust 1.83 (#13596)
* Apply clippy fixes `dev/rust_lint.sh` no longer passes for me, maybe because of `rustup update`. This is first portion of fixes suggested by clippy. * Fix typo Co-authored-by: Oleks V <comphead@users.noreply.github.com> * Suppress missing docs clippy check in test code * Revert "Temporarily pin toolchain version to avoid clippy (#13598)" This reverts commit 2b37018. Toolchain pinning is no longer needed. --------- Co-authored-by: Daniël Heres <danielheres@gmail.com> Co-authored-by: Oleks V <comphead@users.noreply.github.com>
1 parent 3eebc95 commit 703b10d

File tree

74 files changed

+102
-133
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

74 files changed

+102
-133
lines changed

datafusion/catalog/src/catalog.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -101,7 +101,6 @@ use datafusion_common::Result;
101101
/// [`UnityCatalogProvider`]: https://github.com/delta-io/delta-rs/blob/951436ecec476ce65b5ed3b58b50fb0846ca7b91/crates/deltalake-core/src/data_catalog/unity/datafusion.rs#L111-L123
102102
///
103103
/// [`TableProvider`]: crate::TableProvider
104-
105104
pub trait CatalogProvider: Debug + Sync + Send {
106105
/// Returns the catalog provider as [`Any`]
107106
/// so that it can be downcast to a specific implementation.

datafusion/common/src/column.rs

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -109,21 +109,23 @@ impl Column {
109109
/// where `"foo.BAR"` would be parsed to a reference to column named `foo.BAR`
110110
pub fn from_qualified_name(flat_name: impl Into<String>) -> Self {
111111
let flat_name = flat_name.into();
112-
Self::from_idents(&mut parse_identifiers_normalized(&flat_name, false))
113-
.unwrap_or_else(|| Self {
112+
Self::from_idents(&mut parse_identifiers_normalized(&flat_name, false)).unwrap_or(
113+
Self {
114114
relation: None,
115115
name: flat_name,
116-
})
116+
},
117+
)
117118
}
118119

119120
/// Deserialize a fully qualified name string into a column preserving column text case
120121
pub fn from_qualified_name_ignore_case(flat_name: impl Into<String>) -> Self {
121122
let flat_name = flat_name.into();
122-
Self::from_idents(&mut parse_identifiers_normalized(&flat_name, true))
123-
.unwrap_or_else(|| Self {
123+
Self::from_idents(&mut parse_identifiers_normalized(&flat_name, true)).unwrap_or(
124+
Self {
124125
relation: None,
125126
name: flat_name,
126-
})
127+
},
128+
)
127129
}
128130

129131
/// return the column's name.

datafusion/common/src/hash_utils.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ pub trait HashValue {
6363
fn hash_one(&self, state: &RandomState) -> u64;
6464
}
6565

66-
impl<'a, T: HashValue + ?Sized> HashValue for &'a T {
66+
impl<T: HashValue + ?Sized> HashValue for &T {
6767
fn hash_one(&self, state: &RandomState) -> u64 {
6868
T::hash_one(self, state)
6969
}

datafusion/common/src/utils/mod.rs

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -319,17 +319,13 @@ pub fn longest_consecutive_prefix<T: Borrow<usize>>(
319319
count
320320
}
321321

322-
/// Array Utils
323-
324322
/// Wrap an array into a single element `ListArray`.
325323
/// For example `[1, 2, 3]` would be converted into `[[1, 2, 3]]`
326324
/// The field in the list array is nullable.
327325
pub fn array_into_list_array_nullable(arr: ArrayRef) -> ListArray {
328326
array_into_list_array(arr, true)
329327
}
330328

331-
/// Array Utils
332-
333329
/// Wrap an array into a single element `ListArray`.
334330
/// For example `[1, 2, 3]` would be converted into `[[1, 2, 3]]`
335331
pub fn array_into_list_array(arr: ArrayRef, nullable: bool) -> ListArray {
@@ -569,7 +565,7 @@ pub mod datafusion_strsim {
569565

570566
struct StringWrapper<'a>(&'a str);
571567

572-
impl<'a, 'b> IntoIterator for &'a StringWrapper<'b> {
568+
impl<'b> IntoIterator for &StringWrapper<'b> {
573569
type Item = char;
574570
type IntoIter = Chars<'b>;
575571

datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ pub struct AvroArrowArrayReader<'a, R: Read> {
6060
schema_lookup: BTreeMap<String, usize>,
6161
}
6262

63-
impl<'a, R: Read> AvroArrowArrayReader<'a, R> {
63+
impl<R: Read> AvroArrowArrayReader<'_, R> {
6464
pub fn try_new(
6565
reader: R,
6666
schema: SchemaRef,

datafusion/core/src/datasource/avro_to_arrow/reader.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ pub struct Reader<'a, R: Read> {
128128
batch_size: usize,
129129
}
130130

131-
impl<'a, R: Read> Reader<'a, R> {
131+
impl<R: Read> Reader<'_, R> {
132132
/// Create a new Avro Reader from any value that implements the `Read` trait.
133133
///
134134
/// If reading a `File`, you can customise the Reader, such as to enable schema
@@ -157,7 +157,7 @@ impl<'a, R: Read> Reader<'a, R> {
157157
}
158158
}
159159

160-
impl<'a, R: Read> Iterator for Reader<'a, R> {
160+
impl<R: Read> Iterator for Reader<'_, R> {
161161
type Item = ArrowResult<RecordBatch>;
162162

163163
/// Returns the next batch of results (defined by `self.batch_size`), or `None` if there

datafusion/core/src/datasource/file_format/options.rs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ pub struct CsvReadOptions<'a> {
8989
pub file_sort_order: Vec<Vec<SortExpr>>,
9090
}
9191

92-
impl<'a> Default for CsvReadOptions<'a> {
92+
impl Default for CsvReadOptions<'_> {
9393
fn default() -> Self {
9494
Self::new()
9595
}
@@ -243,7 +243,7 @@ pub struct ParquetReadOptions<'a> {
243243
pub file_sort_order: Vec<Vec<SortExpr>>,
244244
}
245245

246-
impl<'a> Default for ParquetReadOptions<'a> {
246+
impl Default for ParquetReadOptions<'_> {
247247
fn default() -> Self {
248248
Self {
249249
file_extension: DEFAULT_PARQUET_EXTENSION,
@@ -323,7 +323,7 @@ pub struct ArrowReadOptions<'a> {
323323
pub table_partition_cols: Vec<(String, DataType)>,
324324
}
325325

326-
impl<'a> Default for ArrowReadOptions<'a> {
326+
impl Default for ArrowReadOptions<'_> {
327327
fn default() -> Self {
328328
Self {
329329
schema: None,
@@ -368,7 +368,7 @@ pub struct AvroReadOptions<'a> {
368368
pub table_partition_cols: Vec<(String, DataType)>,
369369
}
370370

371-
impl<'a> Default for AvroReadOptions<'a> {
371+
impl Default for AvroReadOptions<'_> {
372372
fn default() -> Self {
373373
Self {
374374
schema: None,
@@ -420,7 +420,7 @@ pub struct NdJsonReadOptions<'a> {
420420
pub file_sort_order: Vec<Vec<SortExpr>>,
421421
}
422422

423-
impl<'a> Default for NdJsonReadOptions<'a> {
423+
impl Default for NdJsonReadOptions<'_> {
424424
fn default() -> Self {
425425
Self {
426426
schema: None,

datafusion/core/src/datasource/file_format/parquet.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -477,7 +477,7 @@ impl<'a> ObjectStoreFetch<'a> {
477477
}
478478
}
479479

480-
impl<'a> MetadataFetch for ObjectStoreFetch<'a> {
480+
impl MetadataFetch for ObjectStoreFetch<'_> {
481481
fn fetch(
482482
&mut self,
483483
range: Range<usize>,

datafusion/core/src/datasource/listing/helpers.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,7 @@ pub fn split_files(
135135
partitioned_files.sort_by(|a, b| a.path().cmp(b.path()));
136136

137137
// effectively this is div with rounding up instead of truncating
138-
let chunk_size = (partitioned_files.len() + n - 1) / n;
138+
let chunk_size = partitioned_files.len().div_ceil(n);
139139
let mut chunks = Vec::with_capacity(n);
140140
let mut current_chunk = Vec::with_capacity(chunk_size);
141141
for file in partitioned_files.drain(..) {

datafusion/core/src/datasource/physical_plan/file_groups.rs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -217,8 +217,7 @@ impl FileGroupPartitioner {
217217
return None;
218218
}
219219

220-
let target_partition_size =
221-
(total_size as usize + (target_partitions) - 1) / (target_partitions);
220+
let target_partition_size = (total_size as usize).div_ceil(target_partitions);
222221

223222
let current_partition_index: usize = 0;
224223
let current_partition_size: usize = 0;

0 commit comments

Comments
 (0)