Commit 6ebf9fe

[SPARK-51519][SQL] MERGE INTO/UPDATE/DELETE support join hint
1 parent 984d578

11 files changed: +3908 −64 lines

sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4

Lines changed: 1 addition & 1 deletion
@@ -650,7 +650,7 @@ dmlStatementNoWith
     | fromClause multiInsertQueryBody+ #multiInsertQuery
     | DELETE FROM identifierReference tableAlias whereClause? #deleteFromTable
     | UPDATE identifierReference tableAlias setClause whereClause? #updateTable
-    | MERGE (WITH SCHEMA EVOLUTION)? INTO target=identifierReference targetAlias=tableAlias
+    | MERGE (hints+=hint)* (WITH SCHEMA EVOLUTION)? INTO target=identifierReference targetAlias=tableAlias
         USING (source=identifierReference |
           LEFT_PAREN sourceQuery=query RIGHT_PAREN) sourceAlias=tableAlias
         ON mergeCondition=booleanExpression
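
With the new (hints+=hint)* rule, join hints can be written immediately after the MERGE keyword, reusing the /*+ ... */ hint syntax Spark already parses after SELECT. A minimal usage sketch, assuming a live SparkSession named spark and illustrative table, alias, and column names:

    spark.sql("""
      MERGE /*+ BROADCAST(src) */ INTO target t
      USING source src
      ON t.id = src.id
      WHEN MATCHED THEN UPDATE SET t.value = src.value
      WHEN NOT MATCHED THEN INSERT (id, value) VALUES (src.id, src.value)
    """)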

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/RewriteMergeIntoTable.scala

Lines changed: 103 additions & 53 deletions
@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.expressions.{Alias, And, Attribute, Attribu
 import org.apache.spark.sql.catalyst.expressions.Literal.{FalseLiteral, TrueLiteral}
 import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
 import org.apache.spark.sql.catalyst.plans.{FullOuter, Inner, JoinType, LeftAnti, LeftOuter, RightOuter}
-import org.apache.spark.sql.catalyst.plans.logical.{AppendData, DeleteAction, Filter, HintInfo, InsertAction, Join, JoinHint, LogicalPlan, MergeAction, MergeIntoTable, MergeRows, NO_BROADCAST_AND_REPLICATION, Project, ReplaceData, UpdateAction, WriteDelta}
+import org.apache.spark.sql.catalyst.plans.logical.{AppendData, DeleteAction, Filter, HintInfo, InsertAction, Join, JoinHint, LogicalPlan, MergeAction, MergeIntoTable, MergeRows, NO_BROADCAST_AND_REPLICATION, Project, ReplaceData, ResolvedHint, UpdateAction, WriteDelta}
 import org.apache.spark.sql.catalyst.plans.logical.MergeRows.{Copy, Delete, Discard, Insert, Instruction, Keep, ROW_ID, Split, Update}
 import org.apache.spark.sql.catalyst.util.RowDeltaUtils.{OPERATION_COLUMN, WRITE_OPERATION, WRITE_WITH_METADATA_OPERATION}
 import org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations
@@ -52,27 +52,11 @@ object RewriteMergeIntoTable extends RewriteRowLevelCommand with PredicateHelper
       EliminateSubqueryAliases(aliasedTable) match {
         case r: DataSourceV2Relation =>
           validateMergeIntoConditions(m)
+          buildAppendDataPlan(r, r, source, cond, notMatchedActions)

-          // NOT MATCHED conditions may only refer to columns in source so they can be pushed down
-          val insertAction = notMatchedActions.head.asInstanceOf[InsertAction]
-          val filteredSource = insertAction.condition match {
-            case Some(insertCond) => Filter(insertCond, source)
-            case None => source
-          }
-
-          // there is only one NOT MATCHED action, use a left anti join to remove any matching rows
-          // and switch to using a regular append instead of a row-level MERGE operation
-          // only unmatched source rows that match the condition are appended to the table
-          val joinPlan = Join(filteredSource, r, LeftAnti, Some(cond), JoinHint.NONE)
-
-          val output = insertAction.assignments.map(_.value)
-          val outputColNames = r.output.map(_.name)
-          val projectList = output.zip(outputColNames).map { case (expr, name) =>
-            Alias(expr, name)()
-          }
-          val project = Project(projectList, joinPlan)
-
-          AppendData.byPosition(r, project)
+        case h @ ResolvedHint(r: DataSourceV2Relation, _) =>
+          validateMergeIntoConditions(m)
+          buildAppendDataPlan(r, h, source, cond, notMatchedActions)

         case _ =>
           m
@@ -86,35 +70,11 @@ object RewriteMergeIntoTable extends RewriteRowLevelCommand with PredicateHelper
       EliminateSubqueryAliases(aliasedTable) match {
         case r: DataSourceV2Relation =>
           validateMergeIntoConditions(m)
+          buildAppendDataPlanForMultipleNotMatchedActions(r, r, source, cond, notMatchedActions)

-          // there are only NOT MATCHED actions, use a left anti join to remove any matching rows
-          // and switch to using a regular append instead of a row-level MERGE operation
-          // only unmatched source rows that match action conditions are appended to the table
-          val joinPlan = Join(source, r, LeftAnti, Some(cond), JoinHint.NONE)
-
-          val notMatchedInstructions = notMatchedActions.map {
-            case InsertAction(cond, assignments) =>
-              Keep(Insert, cond.getOrElse(TrueLiteral), assignments.map(_.value))
-            case other =>
-              throw new AnalysisException(
-                errorClass = "_LEGACY_ERROR_TEMP_3053",
-                messageParameters = Map("other" -> other.toString))
-          }
-
-          val outputs = notMatchedInstructions.flatMap(_.outputs)
-
-          // merge rows as there are multiple NOT MATCHED actions
-          val mergeRows = MergeRows(
-            isSourceRowPresent = TrueLiteral,
-            isTargetRowPresent = FalseLiteral,
-            matchedInstructions = Nil,
-            notMatchedInstructions = notMatchedInstructions,
-            notMatchedBySourceInstructions = Nil,
-            checkCardinality = false,
-            output = generateExpandOutput(r.output, outputs),
-            joinPlan)
-
-          AppendData.byPosition(r, mergeRows)
+        case h @ ResolvedHint(r: DataSourceV2Relation, _) =>
+          validateMergeIntoConditions(m)
+          buildAppendDataPlanForMultipleNotMatchedActions(r, h, source, cond, notMatchedActions)

         case _ =>
           m
@@ -139,11 +99,92 @@ object RewriteMergeIntoTable extends RewriteRowLevelCommand with PredicateHelper
                 notMatchedActions, notMatchedBySourceActions)
           }

+        case h @ ResolvedHint(
+            r @ DataSourceV2Relation(tbl: SupportsRowLevelOperations, _, _, _, _), _) =>
+          validateMergeIntoConditions(m)
+          val table = buildOperationTable(tbl, MERGE, CaseInsensitiveStringMap.empty())
+          table.operation match {
+            case _: SupportsDelta =>
+              buildWriteDeltaPlan(
+                r, table, source, cond, matchedActions,
+                notMatchedActions, notMatchedBySourceActions, Some(h))
+            case _ =>
+              buildReplaceDataPlan(
+                r, table, source, cond, matchedActions,
+                notMatchedActions, notMatchedBySourceActions, Some(h))
+          }
+
         case _ =>
           m
       }
   }

+  // build a rewrite plan for sources that support appending data
+  private def buildAppendDataPlan(
+      relation: DataSourceV2Relation,
+      target: LogicalPlan,
+      source: LogicalPlan,
+      cond: Expression,
+      notMatchedActions: Seq[MergeAction]): AppendData = {
+    // NOT MATCHED conditions may only refer to columns in source so they can be pushed down
+    val insertAction = notMatchedActions.head.asInstanceOf[InsertAction]
+    val filteredSource = insertAction.condition match {
+      case Some(insertCond) => Filter(insertCond, source)
+      case None => source
+    }
+
+    // there is only one NOT MATCHED action, use a left anti join to remove any matching rows
+    // and switch to using a regular append instead of a row-level MERGE operation
+    // only unmatched source rows that match the condition are appended to the table
+    val joinPlan = Join(filteredSource, target, LeftAnti, Some(cond), JoinHint.NONE)
+
+    val output = insertAction.assignments.map(_.value)
+    val outputColNames = relation.output.map(_.name)
+    val projectList = output.zip(outputColNames).map { case (expr, name) =>
+      Alias(expr, name)()
+    }
+    val project = Project(projectList, joinPlan)
+
+    AppendData.byPosition(relation, project)
+  }
+
+  // build a rewrite plan for sources that support appending data have multiple not matched actions
+  private def buildAppendDataPlanForMultipleNotMatchedActions(
+      relation: DataSourceV2Relation,
+      target: LogicalPlan,
+      source: LogicalPlan,
+      cond: Expression,
+      notMatchedActions: Seq[MergeAction]): AppendData = {
+    // there are only NOT MATCHED actions, use a left anti join to remove any matching rows
+    // and switch to using a regular append instead of a row-level MERGE operation
+    // only unmatched source rows that match action conditions are appended to the table
+    val joinPlan = Join(source, target, LeftAnti, Some(cond), JoinHint.NONE)
+
+    val notMatchedInstructions = notMatchedActions.map {
+      case InsertAction(cond, assignments) =>
+        Keep(Insert, cond.getOrElse(TrueLiteral), assignments.map(_.value))
+      case other =>
+        throw new AnalysisException(
+          errorClass = "_LEGACY_ERROR_TEMP_3053",
+          messageParameters = Map("other" -> other.toString))
+    }
+
+    val outputs = notMatchedInstructions.flatMap(_.outputs)
+
+    // merge rows as there are multiple NOT MATCHED actions
+    val mergeRows = MergeRows(
+      isSourceRowPresent = TrueLiteral,
+      isTargetRowPresent = FalseLiteral,
+      matchedInstructions = Nil,
+      notMatchedInstructions = notMatchedInstructions,
+      notMatchedBySourceInstructions = Nil,
+      checkCardinality = false,
+      output = generateExpandOutput(relation.output, outputs),
+      joinPlan)
+
+    AppendData.byPosition(relation, mergeRows)
+  }
+
   // build a rewrite plan for sources that support replacing groups of data (e.g. files, partitions)
   private def buildReplaceDataPlan(
       relation: DataSourceV2Relation,
@@ -152,7 +193,8 @@ object RewriteMergeIntoTable extends RewriteRowLevelCommand with PredicateHelper
       cond: Expression,
       matchedActions: Seq[MergeAction],
       notMatchedActions: Seq[MergeAction],
-      notMatchedBySourceActions: Seq[MergeAction]): ReplaceData = {
+      notMatchedBySourceActions: Seq[MergeAction],
+      hintOption: Option[ResolvedHint] = None): ReplaceData = {

     // resolve all required metadata attrs that may be used for grouping data on write
     // for instance, JDBC data source may cluster data by shard/host before writing
@@ -161,12 +203,16 @@ object RewriteMergeIntoTable extends RewriteRowLevelCommand with PredicateHelper
     // construct a read relation and include all required metadata columns
     val readRelation = buildRelationWithAttrs(relation, operationTable, metadataAttrs)

+    val target = hintOption.map { resolvedHint =>
+      resolvedHint.withNewChildren(Seq(readRelation))
+    }.getOrElse(readRelation)
+
     val checkCardinality = shouldCheckCardinality(matchedActions)

     // use left outer join if there is no NOT MATCHED action, unmatched source rows can be discarded
     // use full outer join in all other cases, unmatched source rows may be needed
     val joinType = if (notMatchedActions.isEmpty) LeftOuter else FullOuter
-    val joinPlan = join(readRelation, source, joinType, cond, checkCardinality)
+    val joinPlan = join(target, source, joinType, cond, checkCardinality)

     val mergeRowsPlan = buildReplaceDataMergeRowsPlan(
       readRelation, joinPlan, matchedActions, notMatchedActions,
@@ -260,7 +306,8 @@ object RewriteMergeIntoTable extends RewriteRowLevelCommand with PredicateHelper
       cond: Expression,
       matchedActions: Seq[MergeAction],
       notMatchedActions: Seq[MergeAction],
-      notMatchedBySourceActions: Seq[MergeAction]): WriteDelta = {
+      notMatchedBySourceActions: Seq[MergeAction],
+      hintOption: Option[ResolvedHint] = None): WriteDelta = {

     val operation = operationTable.operation.asInstanceOf[SupportsDelta]

@@ -279,11 +326,14 @@ object RewriteMergeIntoTable extends RewriteRowLevelCommand with PredicateHelper
     } else {
       (readRelation, cond)
     }
+    val target = hintOption.map { resolvedHint =>
+      resolvedHint.withNewChildren(Seq(filteredReadRelation))
+    }.getOrElse(filteredReadRelation)

     val checkCardinality = shouldCheckCardinality(matchedActions)

     val joinType = chooseWriteDeltaJoinType(notMatchedActions, notMatchedBySourceActions)
-    val joinPlan = join(filteredReadRelation, source, joinType, joinCond, checkCardinality)
+    val joinPlan = join(target, source, joinType, joinCond, checkCardinality)

     val mergeRowsPlan = buildWriteDeltaMergeRowsPlan(
       readRelation, joinPlan, matchedActions, notMatchedActions,
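
The recurring pattern in these hunks is that a ResolvedHint found around the MERGE target is not dropped by the rewrite: the rule rebuilds the read side of the plan and then re-wraps it with the same hint via withNewChildren, so the hint still sits on the correct join side when physical planning chooses the join strategy. A self-contained sketch of that re-wrapping step, using OneRowRelation as an illustrative stand-in for the DSv2 read relation (not the production call sites):

    import org.apache.spark.sql.catalyst.plans.logical.{HintInfo, LogicalPlan, OneRowRelation, ResolvedHint}

    // Re-attach an optional hint that wrapped the original target onto the freshly
    // built read relation; with no hint, the read relation is used unchanged.
    def reapplyHint(hintOption: Option[ResolvedHint], readRelation: LogicalPlan): LogicalPlan =
      hintOption
        .map(_.withNewChildren(Seq(readRelation)))
        .getOrElse(readRelation)

    val readRelation: LogicalPlan = OneRowRelation()       // stand-in for the rebuilt read relation
    val hint = ResolvedHint(OneRowRelation(), HintInfo())   // stand-in for the hint found on the target

    val hinted = reapplyHint(Some(hint), readRelation)      // ResolvedHint now wraps readRelation
    val plain  = reapplyHint(None, readRelation)            // unchanged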

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 2 additions & 1 deletion
@@ -1141,14 +1141,15 @@ class AstBuilder extends DataTypeAstBuilder
         matchedActions, notMatchedActions, notMatchedBySourceActions))
     val targetTableAlias = getTableAliasWithoutColumnAlias(ctx.targetAlias, "MERGE")
     val aliasedTarget = targetTableAlias.map(SubqueryAlias(_, targetTable)).getOrElse(targetTable)
-    MergeIntoTable(
+    val plan: LogicalPlan = MergeIntoTable(
       aliasedTarget,
       aliasedSource,
       mergeCondition,
       matchedActions,
       notMatchedActions,
       notMatchedBySourceActions,
       withSchemaEvolution)
+    ctx.hints.asScala.foldRight(plan)(withHints)
   }

   /**
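
The parser now threads the hints onto the plan with foldRight, so each hint block wraps the plan built so far and later analysis sees ordinary hint nodes above MergeIntoTable (assuming the existing withHints helper behaves as it does for SELECT hints). A small self-contained sketch of the folding order, using plain strings as stand-ins for the parser's HintContext and the catalyst nodes:

    // Stand-ins for the parser's HintContext and plan nodes, for illustration only.
    final case class Plan(render: String)

    def withHints(hint: String, plan: Plan): Plan =
      Plan(s"UnresolvedHint($hint, ${plan.render})")

    val hints  = Seq("BROADCAST(src)", "SHUFFLE_HASH(t)")
    val merged = Plan("MergeIntoTable(...)")

    // foldRight applies the last hint first, so the first hint written in the
    // statement ends up as the outermost node:
    // UnresolvedHint(BROADCAST(src), UnresolvedHint(SHUFFLE_HASH(t), MergeIntoTable(...)))
    val hinted = hints.foldRight(merged)(withHints)
    println(hinted.render)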

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala

Lines changed: 1 addition & 0 deletions
@@ -879,6 +879,7 @@ case class MergeIntoTable(
   lazy val rewritable: Boolean = {
     EliminateSubqueryAliases(targetTable) match {
       case DataSourceV2Relation(_: SupportsRowLevelOperations, _, _, _, _) => true
+      case ResolvedHint(DataSourceV2Relation(_: SupportsRowLevelOperations, _, _, _, _), _) => true
       case _ => false
     }
   }

sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/InvokeProcedures.scala

Lines changed: 2 additions & 0 deletions
@@ -54,12 +54,14 @@ class InvokeProcedures(session: SparkSession) extends Rule[LogicalPlan] {
         CommandResult(
           Seq.empty,
           call,
+          call,
           LocalTableScanExec(Seq.empty, Seq.empty, None),
           Seq.empty)
       case Seq(relation: LocalRelation) =>
         CommandResult(
           relation.output,
           call,
+          call,
           LocalTableScanExec(relation.output, relation.data, None),
           relation.data)
       case _ =>

sql/core/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/CommandResult.scala

Lines changed: 1 addition & 0 deletions
@@ -34,6 +34,7 @@ import org.apache.spark.sql.execution.SparkPlan
 case class CommandResult(
     output: Seq[Attribute],
     @transient commandLogicalPlan: LogicalPlan,
+    @transient commandOptimizedLogicalPlan: LogicalPlan,
     @transient commandPhysicalPlan: SparkPlan,
     @transient rows: Seq[InternalRow]) extends LeafNode {
   override def innerChildren: Seq[QueryPlan[_]] = Seq(commandLogicalPlan)

sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala

Lines changed: 1 addition & 0 deletions
@@ -160,6 +160,7 @@ class QueryExecution(
       CommandResult(
         qe.analyzed.output,
         qe.commandExecuted,
+        qe.optimizedPlan,
         qe.executedPlan,
         result.toImmutableArraySeq)
     }
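
CommandResult now also carries the optimized logical plan of the executed command (qe.optimizedPlan), so callers can inspect what the optimizer actually produced for an eagerly executed command rather than only the pre-optimization plan. A hedged sketch of reading it back, assuming a live SparkSession named spark, an existing table t, and that CommandResult lives in org.apache.spark.sql.catalyst.plans.logical as the file path above suggests; the extra wildcard accounts for the new field:

    import org.apache.spark.sql.catalyst.plans.logical.CommandResult

    val df = spark.sql("INSERT INTO t VALUES (1)")
    val optimizedCommands = df.queryExecution.optimizedPlan.collect {
      // fields: output, commandLogicalPlan, commandOptimizedLogicalPlan, commandPhysicalPlan, rows
      case CommandResult(_, _, optimizedCommand, _, _) => optimizedCommand
    }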

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala

Lines changed: 1 addition & 1 deletion
@@ -1031,7 +1031,7 @@ abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
       case logical.LocalRelation(output, data, _, stream) =>
         LocalTableScanExec(output, data, stream) :: Nil
       case logical.EmptyRelation(l) => EmptyRelationExec(l) :: Nil
-      case CommandResult(output, _, plan, data) => CommandResultExec(output, plan, data) :: Nil
+      case CommandResult(output, _, _, plan, data) => CommandResultExec(output, plan, data) :: Nil
       // We should match the combination of limit and offset first, to get the optimal physical
       // plan, instead of planning limit and offset separately.
       case LimitAndOffset(limit, offset, child) =>

sql/core/src/test/resources/sql-tests/analyzer-results/execute-immediate.sql.out

Lines changed: 5 additions & 5 deletions
@@ -48,14 +48,14 @@ SetVariable [variablereference(system.session.sql_string=CAST(NULL AS STRING))]
 -- !query
 EXECUTE IMMEDIATE 'SET spark.sql.ansi.enabled=true'
 -- !query analysis
-CommandResult [key#x, value#x], Execute SetCommand, [[spark.sql.ansi.enabled,true]]
+CommandResult [key#x, value#x], SetCommand (spark.sql.ansi.enabled,Some(true)), Execute SetCommand, [[spark.sql.ansi.enabled,true]]
    +- SetCommand (spark.sql.ansi.enabled,Some(true))


 -- !query
 EXECUTE IMMEDIATE 'CREATE TEMPORARY VIEW IDENTIFIER(:tblName) AS SELECT id, name FROM tbl_view' USING 'tbl_view_tmp' as tblName
 -- !query analysis
-CommandResult Execute CreateViewCommand
+CommandResult CreateViewCommand `tbl_view_tmp`, SELECT id, name FROM tbl_view, false, false, LocalTempView, UNSUPPORTED, true, Execute CreateViewCommand
    +- CreateViewCommand `tbl_view_tmp`, SELECT id, name FROM tbl_view, false, false, LocalTempView, UNSUPPORTED, true
       +- Project [id#x, name#x]
          +- SubqueryAlias tbl_view
@@ -85,7 +85,7 @@ Project [id#x, name#x]
 -- !query
 EXECUTE IMMEDIATE 'REFRESH TABLE IDENTIFIER(:tblName)' USING 'x' as tblName
 -- !query analysis
-CommandResult Execute RefreshTableCommand
+CommandResult RefreshTableCommand `spark_catalog`.`default`.`x`, Execute RefreshTableCommand
    +- RefreshTableCommand `spark_catalog`.`default`.`x`


@@ -206,7 +206,7 @@ Project [id#x, name#x, data#x]
 -- !query
 EXECUTE IMMEDIATE 'INSERT INTO x VALUES(?)' USING 1
 -- !query analysis
-CommandResult Execute InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/x, false, CSV, [path=file:[not included in comparison]/{warehouse_dir}/x], Append, `spark_catalog`.`default`.`x`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/x), [id]
+CommandResult InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/x, false, CSV, [path=file:[not included in comparison]/{warehouse_dir}/x], Append, `spark_catalog`.`default`.`x`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/x), [id], Execute InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/x, false, CSV, [path=file:[not included in comparison]/{warehouse_dir}/x], Append, `spark_catalog`.`default`.`x`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/x), [id]
    +- InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/x, false, CSV, [path=file:[not included in comparison]/{warehouse_dir}/x], Append, `spark_catalog`.`default`.`x`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/x), [id]
       +- Project [col1#x AS id#x]
          +- LocalRelation [col1#x]
@@ -311,7 +311,7 @@ Project [id#x, name#x, data#x, name7 AS p#x]
 -- !query
 EXECUTE IMMEDIATE 'SET VAR sql_string = ?' USING 'SELECT id from tbl_view where name = :first'
 -- !query analysis
-CommandResult SetVariable [variablereference(system.session.sql_string='SELECT * from tbl_view where name = :first or id = :second')]
+CommandResult SetVariable [variablereference(system.session.sql_string='SELECT * from tbl_view where name = :first or id = :second')], SetVariable [variablereference(system.session.sql_string='SELECT * from tbl_view where name = :first or id = :second')]
    +- SetVariable [variablereference(system.session.sql_string='SELECT * from tbl_view where name = :first or id = :second')]
       +- Project [SELECT id from tbl_view where name = :first AS sql_string#x]
          +- OneRowRelation

sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2OptionSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -102,7 +102,7 @@ class DataSourceV2OptionSuite extends DatasourceV2SQLBase {
     val df = sql(s"INSERT INTO $t1 WITH (`write.split-size` = 10) VALUES (1, 'a'), (2, 'b')")

     var collected = df.queryExecution.optimizedPlan.collect {
-      case CommandResult(_, AppendData(relation: DataSourceV2Relation, _, _, _, _, _), _, _) =>
+      case CommandResult(_, AppendData(relation: DataSourceV2Relation, _, _, _, _, _), _, _, _) =>
         assert(relation.options.get("write.split-size") == "10")
     }
     assert (collected.size == 1)
@@ -187,7 +187,7 @@ class DataSourceV2OptionSuite extends DatasourceV2SQLBase {
     var collected = df.queryExecution.optimizedPlan.collect {
       case CommandResult(_,
           OverwriteByExpression(relation: DataSourceV2Relation, _, _, _, _, _, _),
-          _, _) =>
+          _, _, _) =>
         assert(relation.options.get("write.split-size") === "10")
     }
     assert (collected.size == 1)
@@ -247,7 +247,7 @@ class DataSourceV2OptionSuite extends DatasourceV2SQLBase {
     var collected = df.queryExecution.optimizedPlan.collect {
       case CommandResult(_,
           OverwriteByExpression(relation: DataSourceV2Relation, _, _, _, _, _, _),
-          _, _) =>
+          _, _, _) =>
         assert(relation.options.get("write.split-size") == "10")
     }
     assert (collected.size == 1)
