Skip to content

Commit 79d82bc

Browse files
committed
fix some
Signed-off-by: Ruihang Xia <waynestxia@gmail.com>
1 parent 183a862 commit 79d82bc

File tree

14 files changed

+34
-30
lines changed

14 files changed

+34
-30
lines changed

datafusion/core/benches/spm.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ fn generate_spm_for_round_robin_tie_breaker(
6666
};
6767

6868
let rbs = (0..batch_count).map(|_| rb.clone()).collect::<Vec<_>>();
69-
let partitiones = vec![rbs.clone(); partition_count];
69+
let partitions = vec![rbs.clone(); partition_count];
7070

7171
let schema = rb.schema();
7272
let sort = [
@@ -81,7 +81,7 @@ fn generate_spm_for_round_robin_tie_breaker(
8181
]
8282
.into();
8383

84-
let exec = MemorySourceConfig::try_new_exec(&partitiones, schema, None).unwrap();
84+
let exec = MemorySourceConfig::try_new_exec(&partitions, schema, None).unwrap();
8585
SortPreservingMergeExec::new(sort, exec)
8686
.with_round_robin_repartition(enable_round_robin_repartition)
8787
}

datafusion/core/src/datasource/listing/table.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -502,7 +502,7 @@ impl ListingOptions {
502502
///
503503
/// Currently this sets `target_partitions` and `collect_stat`
504504
/// but if more options are added in the future that need to be coordinated
505-
/// they will be synchronized thorugh this method.
505+
/// they will be synchronized through this method.
506506
pub fn with_session_config_options(mut self, config: &SessionConfig) -> Self {
507507
self = self.with_target_partitions(config.target_partitions());
508508
self = self.with_collect_stat(config.collect_statistics());

datafusion/core/tests/dataframe/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -92,8 +92,8 @@ async fn physical_plan_to_string(df: &DataFrame) -> String {
9292
.await
9393
.expect("Error creating physical plan");
9494

95-
let formated = displayable(physical_plan.as_ref()).indent(true);
96-
formated.to_string()
95+
let formatted = displayable(physical_plan.as_ref()).indent(true);
96+
formatted.to_string()
9797
}
9898

9999
pub fn table_with_constraints() -> Arc<dyn TableProvider> {

datafusion/core/tests/physical_optimizer/replace_with_order_preserving_variants.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -667,7 +667,7 @@ async fn test_not_replacing_when_no_need_to_preserve_sorting(
667667

668668
#[rstest]
669669
#[tokio::test]
670-
async fn test_with_multiple_replacable_repartitions(
670+
async fn test_with_multiple_replaceable_repartitions(
671671
#[values(false, true)] source_unbounded: bool,
672672
#[values(false, true)] prefer_existing_sort: bool,
673673
) -> Result<()> {

datafusion/core/tests/physical_optimizer/window_optimize.rs

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -42,10 +42,14 @@ mod test {
4242
.schema(schema.clone())
4343
.alias("t")
4444
.build()?;
45-
let parition = [col("a", &schema)?];
45+
let partition = [col("a", &schema)?];
4646
let frame = WindowFrame::new(None);
47-
let plain =
48-
PlainAggregateWindowExpr::new(Arc::new(cnt), &parition, &[], Arc::new(frame));
47+
let plain = PlainAggregateWindowExpr::new(
48+
Arc::new(cnt),
49+
&partition,
50+
&[],
51+
Arc::new(frame),
52+
);
4953

5054
let bounded_agg_exec = BoundedWindowAggExec::try_new(
5155
vec![Arc::new(plain)],

datafusion/core/tests/user_defined/user_defined_scalar_functions.rs

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1445,7 +1445,7 @@ impl ScalarUDFImpl for MetadataBasedUdf {
14451445
.get("modify_values")
14461446
.map(|v| v == "double_output")
14471447
.unwrap_or(false);
1448-
let mulitplier = if should_double { 2 } else { 1 };
1448+
let multiplier = if should_double { 2 } else { 1 };
14491449

14501450
match &args.args[0] {
14511451
ColumnarValue::Array(array) => {
@@ -1454,7 +1454,7 @@ impl ScalarUDFImpl for MetadataBasedUdf {
14541454
.downcast_ref::<UInt64Array>()
14551455
.unwrap()
14561456
.iter()
1457-
.map(|v| v.map(|x| x * mulitplier))
1457+
.map(|v| v.map(|x| x * multiplier))
14581458
.collect();
14591459
let array_ref = Arc::new(UInt64Array::from(array_values)) as ArrayRef;
14601460
Ok(ColumnarValue::Array(array_ref))
@@ -1465,7 +1465,7 @@ impl ScalarUDFImpl for MetadataBasedUdf {
14651465
};
14661466

14671467
Ok(ColumnarValue::Scalar(ScalarValue::UInt64(
1468-
value.map(|v| v * mulitplier),
1468+
value.map(|v| v * multiplier),
14691469
)))
14701470
}
14711471
}
@@ -1643,7 +1643,7 @@ impl ScalarUDFImpl for ExtensionBasedUdf {
16431643

16441644
fn return_field_from_args(&self, _args: ReturnFieldArgs) -> Result<FieldRef> {
16451645
Ok(Field::new("canonical_extension_udf", DataType::Utf8, true)
1646-
.with_extension_type(MyUserExtentionType {})
1646+
.with_extension_type(MyUserExtensionType {})
16471647
.into())
16481648
}
16491649

@@ -1691,10 +1691,10 @@ impl ScalarUDFImpl for ExtensionBasedUdf {
16911691
}
16921692
}
16931693

1694-
struct MyUserExtentionType {}
1694+
struct MyUserExtensionType {}
16951695

1696-
impl ExtensionType for MyUserExtentionType {
1697-
const NAME: &'static str = "my_user_extention_type";
1696+
impl ExtensionType for MyUserExtensionType {
1697+
const NAME: &'static str = "my_user_extension_type";
16981698
type Metadata = ();
16991699

17001700
fn metadata(&self) -> &Self::Metadata {
@@ -1766,9 +1766,9 @@ async fn test_extension_based_udf() -> Result<()> {
17661766
// To test for input extensions handling, we check the strings returned
17671767
let expected_schema = Schema::new(vec![
17681768
Field::new("without_bool8_extension", DataType::Utf8, true)
1769-
.with_extension_type(MyUserExtentionType {}),
1769+
.with_extension_type(MyUserExtensionType {}),
17701770
Field::new("with_bool8_extension", DataType::Utf8, true)
1771-
.with_extension_type(MyUserExtentionType {}),
1771+
.with_extension_type(MyUserExtensionType {}),
17721772
]);
17731773

17741774
let expected = record_batch!(

datafusion/datasource-parquet/src/opener.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1215,7 +1215,7 @@ mod test {
12151215
}
12161216
}
12171217

1218-
// Test that if no expression rewriter is provided we use a schemaadapter to adapt the data to the expresssion
1218+
// Test that if no expression rewriter is provided we use a schemaadapter to adapt the data to the expression
12191219
let store = Arc::new(InMemory::new()) as Arc<dyn ObjectStore>;
12201220
let batch = record_batch!(("a", Int32, vec![Some(1), Some(2), Some(3)])).unwrap();
12211221
// Write out the batch to a Parquet file

datafusion/datasource-parquet/src/row_filter.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -184,7 +184,7 @@ pub(crate) struct FilterCandidate {
184184
/// Can this filter use an index (e.g. a page index) to prune rows?
185185
can_use_index: bool,
186186
/// The projection to read from the file schema to get the columns
187-
/// required to pass thorugh a `SchemaMapper` to the table schema
187+
/// required to pass through a `SchemaMapper` to the table schema
188188
/// upon which we then evaluate the filter expression.
189189
projection: Vec<usize>,
190190
/// A `SchemaMapper` used to map batches read from the file schema to

datafusion/datasource-parquet/src/source.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -492,7 +492,7 @@ impl FileSource for ParquetSource {
492492
}
493493
(None, Some(schema_adapter_factory)) => {
494494
// If a custom schema adapter factory is provided but no expr adapter factory is provided use the custom SchemaAdapter for both projections and predicate pushdown.
495-
// This maximizes compatiblity with existing code that uses the SchemaAdapter API and did not explicitly opt into the PhysicalExprAdapterFactory API.
495+
// This maximizes compatibility with existing code that uses the SchemaAdapter API and did not explicitly opt into the PhysicalExprAdapterFactory API.
496496
(None, Arc::clone(schema_adapter_factory) as _)
497497
}
498498
(None, None) => {

datafusion/functions/src/core/union_tag.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -136,7 +136,7 @@ impl ScalarUDFImpl for UnionTagFunc {
136136
})
137137
.ok_or_else(|| {
138138
exec_datafusion_err!(
139-
"union_tag: union scalar with unknow type_id {value_type_id}"
139+
"union_tag: union scalar with unknown type_id {value_type_id}"
140140
)
141141
}),
142142
None => Ok(ColumnarValue::Scalar(ScalarValue::try_new_null(

0 commit comments

Comments
 (0)