flink Table의 Orderby 및 Limit에 대해 이야기합니다.
6231 단어 flink
본 고는 주로 flink Table의 Orderby와 Limit를 연구하고자 한다.
실례
// Create a Table from a DataSet, naming the fields a, b and c
Table in = tableEnv.fromDataSet(ds, "a, b, c");
// Sort the table by field a in ascending order
Table result = in.orderBy("a.asc");
// Create a Table from a DataSet, naming the fields a, b and c
Table in = tableEnv.fromDataSet(ds, "a, b, c");
// returns the first 5 records from the sorted result
Table result1 = in.orderBy("a.asc").fetch(5);
// skips the first 3 records and returns all following records from the sorted result
Table result2 = in.orderBy("a.asc").offset(3);
// skips the first 10 records and returns the next 5 records from the sorted result
Table result3 = in.orderBy("a.asc").offset(10).fetch(5);
flink-table_2.11-1.7.0-sources.jar!/org/apache/flink/table/api/table.scala
class Table(
    private[flink] val tableEnv: TableEnvironment,
    private[flink] val logicalPlan: LogicalNode) {
  //......

  /**
    * Sorts the table by the given comma-separated field expressions
    * (e.g. "a.asc, b.desc"). The string is parsed into expressions and
    * delegated to the varargs overload.
    */
  def orderBy(fields: String): Table =
    orderBy(ExpressionParser.parseExpressionList(fields): _*)

  /**
    * Sorts the table by the given expressions. A plain expression that is
    * not already an [[Ordering]] defaults to ascending order.
    */
  def orderBy(fields: Expression*): Table = {
    val orderings: Seq[Ordering] = fields.map {
      case ord: Ordering => ord
      case plain => Asc(plain)
    }
    new Table(tableEnv, Sort(orderings, logicalPlan).validate(tableEnv))
  }

  /**
    * Skips the first `offset` records of the sorted result. The fetch count
    * is left unbounded (-1) until a subsequent fetch() fills it in.
    */
  def offset(offset: Int): Table =
    new Table(tableEnv, Limit(offset, -1, logicalPlan).validate(tableEnv))

  /**
    * Returns at most `fetch` records of the sorted result, optionally after
    * a preceding offset(). Calling fetch() twice is rejected.
    */
  def fetch(fetch: Int): Table = {
    if (fetch < 0) {
      throw new ValidationException("FETCH count must be equal or larger than 0.")
    }
    this.logicalPlan match {
      // an offset() was applied before: fill in the still-unbounded fetch count
      case Limit(offsetValue, -1, childPlan) =>
        new Table(tableEnv, Limit(offsetValue, fetch, childPlan).validate(tableEnv))
      // a fetch count was already set: reject the second call
      case _: Limit =>
        throw new ValidationException("FETCH is already defined.")
      // no Limit yet: create one with an implicit offset of 0
      case _ =>
        new Table(tableEnv, Limit(0, fetch, logicalPlan).validate(tableEnv))
    }
  }
  //......
}
offset은 fetch가 -1인 Limit 노드를 생성한다. fetch는 이미 Limit가 있으면 그 fetch 값을 채우고, 없으면 offset이 0인 Limit 노드를 새로 생성한다.
Sort
flink-table_2.11-1.7.0-sources.jar!/org/apache/flink/table/plan/logical/operators.scala
case class Sort(order: Seq[Ordering], child: LogicalNode) extends UnaryNode {

  // Sorting never changes the schema; expose the child's attributes unchanged.
  override def output: Seq[Attribute] = child.output

  override protected[logical] def construct(relBuilder: RelBuilder): RelBuilder = {
    child.construct(relBuilder)
    // Translate every ordering expression to a RexNode and hand the
    // resulting collation keys to Calcite's RelBuilder.
    val sortKeys = order.map(_.toRexNode(relBuilder))
    relBuilder.sort(sortKeys.asJava)
  }

  override def validate(tableEnv: TableEnvironment): LogicalNode = {
    tableEnv match {
      case _: StreamTableEnvironment =>
        failValidation("Sort on stream tables is currently not supported.")
      case _ => // batch environments support sorting
    }
    super.validate(tableEnv)
  }
}
Ordering
flink-table_2.11-1.7.0-sources.jar!/org/apache/flink/table/expressions/ordering.scala
abstract class Ordering extends UnaryExpression {

  // An ordering key must be a direct field reference (NamedExpression),
  // not an arbitrary computed expression.
  override private[flink] def validateInput(): ValidationResult =
    child match {
      case _: NamedExpression => ValidationSuccess
      case _ => ValidationFailure("Sort should only based on field reference")
    }
}
case class Asc(child: Expression) extends Ordering {

  override def toString: String = s"($child).asc"

  // Ascending is RelBuilder's default collation, so the child's RexNode
  // is passed through without any wrapping.
  override private[flink] def toRexNode(implicit relBuilder: RelBuilder): RexNode =
    child.toRexNode

  override private[flink] def resultType: TypeInformation[_] = child.resultType
}
case class Desc(child: Expression) extends Ordering {

  override def toString: String = s"($child).desc"

  // Descending order is expressed by wrapping the child's RexNode
  // in RelBuilder.desc.
  override private[flink] def toRexNode(implicit relBuilder: RelBuilder): RexNode =
    relBuilder.desc(child.toRexNode)

  override private[flink] def resultType: TypeInformation[_] = child.resultType
}
Limit
flink-table_2.11-1.7.0-sources.jar!/org/apache/flink/table/plan/logical/operators.scala
case class Limit(offset: Int, fetch: Int = -1, child: LogicalNode) extends UnaryNode {

  // Limiting never changes the schema; expose the child's attributes unchanged.
  override def output: Seq[Attribute] = child.output

  override protected[logical] def construct(relBuilder: RelBuilder): RelBuilder = {
    child.construct(relBuilder)
    // fetch == -1 means "no fetch count", per RelBuilder.limit semantics.
    relBuilder.limit(offset, fetch)
  }

  override def validate(tableEnv: TableEnvironment): LogicalNode = {
    // Run the precondition checks in their documented order; the first
    // violated one raises a validation failure.
    val checks: Seq[(Boolean, String)] = Seq(
      tableEnv.isInstanceOf[StreamTableEnvironment] ->
        "Limit on stream tables is currently not supported.",
      !child.isInstanceOf[Sort] ->
        "Limit operator must be preceded by an OrderBy operator.",
      (offset < 0) ->
        "Offset should be greater than or equal to zero.")
    checks.foreach { case (violated, message) =>
      if (violated) failValidation(message)
    }
    super.validate(tableEnv)
  }
}
작은 매듭
offset은 fetch가 -1인 Limit 노드를 생성한다. fetch는 이미 Limit가 있으면 그 fetch 값을 채우고, 없으면 offset이 0인 Limit 노드를 새로 생성한다.
doc
이 내용에 흥미가 있습니까?
현재 기사가 여러분의 문제를 해결하지 못하는 경우 AI 엔진은 머신러닝 분석(스마트 모델이 방금 만들어져 부정확한 경우가 있을 수 있음)을 통해 가장 유사한 기사를 추천합니다:
[case52] flink Keyed Stream의 aggregation 작업에 대해 이야기합니다.flink-streaming-java_2.11-1.7.0-sources.jar!/org/apache/flink/streaming/api/datastream/KeyedStream.java Keyed Stream의agg...
텍스트를 자유롭게 공유하거나 복사할 수 있습니다.하지만 이 문서의 URL은 참조 URL로 남겨 두십시오.
CC BY-SA 2.5, CC BY-SA 3.0 및 CC BY-SA 4.0에 따라 라이센스가 부여됩니다.