org.apache.calcite.tools.RelBuilder

The following examples of the Java API org.apache.calcite.tools.RelBuilder are taken from open-source projects.

106 Examples
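
Before the project-sourced examples, here is a minimal, self-contained sketch of typical RelBuilder usage. It is not taken from any of the projects below; the column names and values are illustrative assumptions. It builds a relational expression from literal rows, filters it, projects one column, and prints the resulting plan.

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class RelBuilderSketch {

    public static void main(String[] args) {
        // An empty root schema suffices because the plan starts from literal
        // rows (Values) rather than a table scan.
        final FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true))
                .build();
        final RelBuilder builder = RelBuilder.create(config);
        final RelNode node = builder
                // Two literal rows with columns (deptno, name).
                .values(new String[] { "deptno", "name" }, 10, "Sales", 20, "Engineering")
                // WHERE deptno = 10
                .filter(builder.call(SqlStdOperatorTable.EQUALS,
                        builder.field("deptno"), builder.literal(10)))
                // SELECT name
                .project(builder.field("name"))
                .build();
        System.out.println(RelOptUtil.toString(node));
    }
}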

Source: ToLogicalConverter.java
with GNU General Public License v3.0
from MyCATApache

/**
 * Shuttle to convert any rel plan to a plan with all logical nodes.
 */
public class ToLogicalConverter extends RelShuttleImpl {

    private final RelBuilder relBuilder;

    public ToLogicalConverter(RelBuilder relBuilder) {
        this.relBuilder = relBuilder;
    }

    @Override
    public RelNode visit(TableScan scan) {
        return LogicalTableScan.create(scan.getCluster(), scan.getTable(), scan.getHints());
    }

    @Override
    public RelNode visit(RelNode relNode) {
        if (relNode instanceof Aggregate) {
            final Aggregate agg = (Aggregate) relNode;
            return relBuilder.push(visit(agg.getInput())).aggregate(relBuilder.groupKey(agg.getGroupSet(), (Iterable<ImmutableBitSet>) agg.groupSets), agg.getAggCallList()).build();
        }
        if (relNode instanceof TableScan) {
            return visit((TableScan) relNode);
        }
        if (relNode instanceof Filter) {
            final Filter filter = (Filter) relNode;
            return relBuilder.push(visit(filter.getInput())).filter(filter.getCondition()).build();
        }
        if (relNode instanceof Project) {
            final Project project = (Project) relNode;
            return relBuilder.push(visit(project.getInput())).project(project.getProjects(), project.getRowType().getFieldNames()).build();
        }
        if (relNode instanceof Union) {
            final Union union = (Union) relNode;
            for (RelNode rel : union.getInputs()) {
                relBuilder.push(visit(rel));
            }
            return relBuilder.union(union.all, union.getInputs().size()).build();
        }
        if (relNode instanceof Intersect) {
            final Intersect intersect = (Intersect) relNode;
            for (RelNode rel : intersect.getInputs()) {
                relBuilder.push(visit(rel));
            }
            return relBuilder.intersect(intersect.all, intersect.getInputs().size()).build();
        }
        if (relNode instanceof Minus) {
            final Minus minus = (Minus) relNode;
            for (RelNode rel : minus.getInputs()) {
                relBuilder.push(visit(rel));
            }
            return relBuilder.minus(minus.all, minus.getInputs().size()).build();
        }
        if (relNode instanceof Join) {
            final Join join = (Join) relNode;
            return relBuilder.push(visit(join.getLeft())).push(visit(join.getRight())).join(join.getJoinType(), join.getCondition()).build();
        }
        if (relNode instanceof Correlate) {
            final Correlate corr = (Correlate) relNode;
            return relBuilder.push(visit(corr.getLeft())).push(visit(corr.getRight())).join(corr.getJoinType(), relBuilder.literal(true), corr.getVariablesSet()).build();
        }
        if (relNode instanceof Values) {
            final Values values = (Values) relNode;
            return relBuilder.values(values.tuples, values.getRowType()).build();
        }
        if (relNode instanceof Sort) {
            final Sort sort = (Sort) relNode;
            return LogicalSort.create(visit(sort.getInput()), sort.getCollation(), sort.offset, sort.fetch);
        }
        if (relNode instanceof Window) {
            final Window window = (Window) relNode;
            final RelNode input = visit(window.getInput());
            return LogicalWindow.create(input.getTraitSet(), input, window.constants, window.getRowType(), window.groups);
        }
        if (relNode instanceof Calc) {
            final Calc calc = (Calc) relNode;
            return LogicalCalc.create(visit(calc.getInput()), calc.getProgram());
        }
        if (relNode instanceof TableModify) {
            final TableModify tableModify = (TableModify) relNode;
            final RelNode input = visit(tableModify.getInput());
            return LogicalTableModify.create(tableModify.getTable(), tableModify.getCatalogReader(), input, tableModify.getOperation(), tableModify.getUpdateColumnList(), tableModify.getSourceExpressionList(), tableModify.isFlattened());
        }
        // if (relNode instanceof EnumerableInterpreter
        // || relNode instanceof JdbcToEnumerableConverter) {
        // return visit(((SingleRel) relNode).getInput());
        // }
        if (relNode instanceof EnumerableInterpreter) {
            return visit(((SingleRel) relNode).getInput());
        }
        if (relNode instanceof EnumerableLimit) {
            final EnumerableLimit limit = (EnumerableLimit) relNode;
            RelNode logicalInput = visit(limit.getInput());
            RelCollation collation = RelCollations.of();
            if (logicalInput instanceof Sort) {
                collation = ((Sort) logicalInput).collation;
                logicalInput = ((Sort) logicalInput).getInput();
            }
            return LogicalSort.create(logicalInput, collation, limit.offset, limit.fetch);
        }
        if (relNode instanceof Uncollect) {
            final Uncollect uncollect = (Uncollect) relNode;
            final RelNode input = visit(uncollect.getInput());
            return Uncollect.create(input.getTraitSet(), input, uncollect.withOrdinality, Collections.emptyList());
        }
        throw new replacedertionError("Need to implement logical converter for " + relNode.getClreplaced().getName());
    }
}
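
A hedged usage note, not part of the MyCATApache source: because ToLogicalConverter is a RelShuttle, it is applied by handing it to RelNode.accept. The physicalPlan and relBuilder variables below are hypothetical and are assumed to belong to the same cluster as the plan being converted.

// Hypothetical fragment showing how the shuttle above is typically invoked.
RelNode logicalPlan = physicalPlan.accept(new ToLogicalConverter(relBuilder));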

Source: RelFieldTrimmer.java
with Apache License 2.0
from lealone

/**
 * Transformer that walks over a tree of relational expressions, replacing each
 * {@link RelNode} with a 'slimmed down' relational expression that projects
 * only the columns required by its consumer.
 *
 * <p>Uses multi-methods to fire the right rule for each type of relational
 * expression. This allows the transformer to be extended without having to
 * add a new method to RelNode, and without requiring a collection of rule
 * classes scattered to the four winds.
 *
 * <p>REVIEW: jhyde, 2009/7/28: Is sql2rel the correct package for this class?
 * Trimming fields is not an essential part of SQL-to-Rel translation, and
 * arguably belongs in the optimization phase. But this transformer does not
 * obey the usual pattern for planner rules; it is difficult to do so, because
 * each {@link RelNode} needs to return a different set of fields after
 * trimming.
 *
 * <p>TODO: Change 2nd arg of the {@link #trimFields} method from BitSet to
 * Mapping. Sometimes it helps the consumer if you return the columns in a
 * particular order. For instance, it may avoid a project at the top of the
 * tree just for reordering. Could ease the transition by writing methods that
 * convert BitSet to Mapping and vice versa.
 */
public class RelFieldTrimmer implements ReflectiveVisitor {

    // ~ Static fields/initializers ---------------------------------------------
    // ~ Instance fields --------------------------------------------------------
    private final ReflectUtil.MethodDispatcher<TrimResult> trimFieldsDispatcher;

    private final RelBuilder relBuilder;

    // ~ Constructors -----------------------------------------------------------
    /**
     * Creates a RelFieldTrimmer.
     *
     * @param validator Validator
     */
    public RelFieldTrimmer(SqlValidator validator, RelBuilder relBuilder) {
        // may be useful one day
        Util.discard(validator);
        this.relBuilder = relBuilder;
        this.trimFieldsDispatcher = ReflectUtil.createMethodDispatcher(TrimResult.class, this, "trimFields", RelNode.class, ImmutableBitSet.class, Set.class);
    }

    // to be removed before 2.0
    @Deprecated
    public RelFieldTrimmer(SqlValidator validator, RelOptCluster cluster, RelFactories.ProjectFactory projectFactory, RelFactories.FilterFactory filterFactory, RelFactories.JoinFactory joinFactory, RelFactories.SemiJoinFactory semiJoinFactory, RelFactories.SortFactory sortFactory, RelFactories.AggregateFactory aggregateFactory, RelFactories.SetOpFactory setOpFactory) {
        this(validator, RelBuilder.proto(projectFactory, filterFactory, joinFactory, semiJoinFactory, sortFactory, aggregateFactory, setOpFactory).create(cluster, null));
    }

    // ~ Methods ----------------------------------------------------------------
    /**
     * Trims unused fields from a relational expression.
     *
     * <p>We presume that all fields of the relational expression are wanted by
     * its consumer, so only trim fields that are not used within the tree.
     *
     * @param root Root node of relational expression
     * @return Trimmed relational expression
     */
    public RelNode trim(RelNode root) {
        final int fieldCount = root.getRowType().getFieldCount();
        final ImmutableBitSet fieldsUsed = ImmutableBitSet.range(fieldCount);
        final Set<RelDataTypeField> extraFields = Collections.emptySet();
        final TrimResult trimResult = dispatchTrimFields(root, fieldsUsed, extraFields);
        if (!trimResult.right.isIdentity()) {
            throw new IllegalArgumentException();
        }
        if (SqlToRelConverter.SQL2REL_LOGGER.isDebugEnabled()) {
            SqlToRelConverter.SQL2REL_LOGGER.debug(RelOptUtil.dumpPlan("Plan after trimming unused fields", trimResult.left, SqlExplainFormat.TEXT, SqlExplainLevel.EXPPLAN_ATTRIBUTES));
        }
        return trimResult.left;
    }

    /**
     * Trims the fields of an input relational expression.
     *
     * @param rel        Relational expression
     * @param input      Input relational expression, whose fields to trim
     * @param fieldsUsed Bitmap of fields needed by the consumer
     * @return New relational expression and its field mapping
     */
    protected TrimResult trimChild(RelNode rel, RelNode input, final ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final ImmutableBitSet.Builder fieldsUsedBuilder = fieldsUsed.rebuild();
        // Fields that define the collation cannot be discarded.
        final RelMetadataQuery mq = rel.getCluster().getMetadataQuery();
        final ImmutableList<RelCollation> collations = mq.collations(input);
        for (RelCollation collation : collations) {
            for (RelFieldCollation fieldCollation : collation.getFieldCollations()) {
                fieldsUsedBuilder.set(fieldCollation.getFieldIndex());
            }
        }
        // Correlating variables are a means for other relational expressions to use
        // fields.
        for (final CorrelationId correlation : rel.getVariablesSet()) {
            rel.accept(new CorrelationReferenceFinder() {

                @Override
                protected RexNode handle(RexFieldAccess fieldAccess) {
                    final RexCorrelVariable v = (RexCorrelVariable) fieldAccess.getReferenceExpr();
                    if (v.getCorrelationId().equals(correlation)) {
                        fieldsUsedBuilder.set(fieldAccess.getField().getIndex());
                    }
                    return fieldAccess;
                }
            });
        }
        return dispatchTrimFields(input, fieldsUsedBuilder.build(), extraFields);
    }

    /**
     * Trims a child relational expression, then adds back a dummy project to
     * restore the fields that were removed.
     *
     * <p>Sounds pointless? It causes unused fields to be removed
     * further down the tree (towards the leaves), but it ensure that the
     * consuming relational expression continues to see the same fields.
     *
     * @param rel        Relational expression
     * @param input      Input relational expression, whose fields to trim
     * @param fieldsUsed Bitmap of fields needed by the consumer
     * @return New relational expression and its field mapping
     */
    protected TrimResult trimChildRestore(RelNode rel, RelNode input, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        TrimResult trimResult = trimChild(rel, input, fieldsUsed, extraFields);
        if (trimResult.right.isIdentity()) {
            return trimResult;
        }
        final RelDataType rowType = input.getRowType();
        List<RelDataTypeField> fieldList = rowType.getFieldList();
        final List<RexNode> exprList = new ArrayList<>();
        final List<String> nameList = rowType.getFieldNames();
        RexBuilder rexBuilder = rel.getCluster().getRexBuilder();
        assert trimResult.right.getSourceCount() == fieldList.size();
        for (int i = 0; i < fieldList.size(); i++) {
            int source = trimResult.right.getTargetOpt(i);
            RelDataTypeField field = fieldList.get(i);
            exprList.add(source < 0 ? rexBuilder.makeZeroLiteral(field.getType()) : rexBuilder.makeInputRef(field.getType(), source));
        }
        relBuilder.push(trimResult.left).project(exprList, nameList);
        return result(relBuilder.build(), Mappings.createIdentity(fieldList.size()));
    }

    /**
     * Invokes {@link #trimFields}, or the appropriate method for the type
     * of the rel parameter, using multi-method dispatch.
     *
     * @param rel        Relational expression
     * @param fieldsUsed Bitmap of fields needed by the consumer
     * @return New relational expression and its field mapping
     */
    protected final TrimResult dispatchTrimFields(RelNode rel, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final TrimResult trimResult = trimFieldsDispatcher.invoke(rel, fieldsUsed, extraFields);
        final RelNode newRel = trimResult.left;
        final Mapping mapping = trimResult.right;
        final int fieldCount = rel.getRowType().getFieldCount();
        assert mapping.getSourceCount() == fieldCount : "source: " + mapping.getSourceCount() + " != " + fieldCount;
        final int newFieldCount = newRel.getRowType().getFieldCount();
        assert mapping.getTargetCount() + extraFields.size() == newFieldCount || Bug.TODO_FIXED : "target: " + mapping.getTargetCount() + " + " + extraFields.size() + " != " + newFieldCount;
        if (Bug.TODO_FIXED) {
            assert newFieldCount > 0 : "rel has no fields after trim: " + rel;
        }
        if (newRel.equals(rel)) {
            return result(rel, mapping);
        }
        return trimResult;
    }

    protected TrimResult result(RelNode r, final Mapping mapping) {
        final RexBuilder rexBuilder = relBuilder.getRexBuilder();
        for (final CorrelationId correlation : r.getVariablesSet()) {
            r = r.accept(new CorrelationReferenceFinder() {

                @Override
                protected RexNode handle(RexFieldAccess fieldAccess) {
                    final RexCorrelVariable v = (RexCorrelVariable) fieldAccess.getReferenceExpr();
                    if (v.getCorrelationId().equals(correlation) && v.getType().getFieldCount() == mapping.getSourceCount()) {
                        final int old = fieldAccess.getField().getIndex();
                        final int new_ = mapping.getTarget(old);
                        final RelDataTypeFactory.Builder typeBuilder = relBuilder.getTypeFactory().builder();
                        for (int target : Util.range(mapping.getTargetCount())) {
                            typeBuilder.add(v.getType().getFieldList().get(mapping.getSource(target)));
                        }
                        final RexNode newV = rexBuilder.makeCorrel(typeBuilder.build(), v.getCorrelationId());
                        if (old != new_) {
                            return rexBuilder.makeFieldAccess(newV, new_);
                        }
                    }
                    return fieldAccess;
                }
            });
        }
        return new TrimResult(r, mapping);
    }

    /**
     * Visit method, per {@link org.apache.calcite.util.ReflectiveVisitor}.
     *
     * <p>This method is invoked reflectively, so there may not be any apparent
     * calls to it. The class (or derived classes) may contain overloads of
     * this method with more specific types for the {@code rel} parameter.
     *
     * <p>Returns a pair: the relational expression created, and the mapping
     * between the original fields and the fields of the newly created
     * relational expression.
     *
     * @param rel        Relational expression
     * @param fieldsUsed Fields needed by the consumer
     * @return relational expression and mapping
     */
    public TrimResult trimFields(RelNode rel, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        // We don't know how to trim this kind of relational expression, so give
        // it back intact.
        Util.discard(fieldsUsed);
        return result(rel, Mappings.createIdentity(rel.getRowType().getFieldCount()));
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link org.apache.calcite.rel.logical.LogicalProject}.
     */
    public TrimResult trimFields(Project project, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final RelDataType rowType = project.getRowType();
        final int fieldCount = rowType.getFieldCount();
        final RelNode input = project.getInput();
        // Which fields are required from the input?
        final Set<RelDataTypeField> inputExtraFields = new LinkedHashSet<>(extraFields);
        RelOptUtil.InputFinder inputFinder = new RelOptUtil.InputFinder(inputExtraFields);
        for (Ord<RexNode> ord : Ord.zip(project.getProjects())) {
            if (fieldsUsed.get(ord.i)) {
                ord.e.accept(inputFinder);
            }
        }
        ImmutableBitSet inputFieldsUsed = inputFinder.inputBitSet.build();
        // Create input with trimmed columns.
        TrimResult trimResult = trimChild(project, input, inputFieldsUsed, inputExtraFields);
        RelNode newInput = trimResult.left;
        final Mapping inputMapping = trimResult.right;
        // If the input is unchanged, and we need to project all columns,
        // there's nothing we can do.
        if (newInput == input && fieldsUsed.cardinality() == fieldCount) {
            return result(project, Mappings.createIdentity(fieldCount));
        }
        // Some parts of the system can't handle rows with zero fields, so
        // pretend that one field is used.
        if (fieldsUsed.cardinality() == 0) {
            return dummyProject(fieldCount, newInput);
        }
        // Build new project expressions, and populate the mapping.
        final List<RexNode> newProjects = new ArrayList<>();
        final RexVisitor<RexNode> shuttle = new RexPermuteInputsShuttle(inputMapping, newInput);
        final Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, fieldCount, fieldsUsed.cardinality());
        for (Ord<RexNode> ord : Ord.zip(project.getProjects())) {
            if (fieldsUsed.get(ord.i)) {
                mapping.set(ord.i, newProjects.size());
                RexNode newProjectExpr = ord.e.accept(shuttle);
                newProjects.add(newProjectExpr);
            }
        }
        final RelDataType newRowType = RelOptUtil.permute(project.getCluster().getTypeFactory(), rowType, mapping);
        relBuilder.push(newInput);
        relBuilder.project(newProjects, newRowType.getFieldNames());
        return result(relBuilder.build(), mapping);
    }

    /**
     * Creates a project with a dummy column, to protect the parts of the system
     * that cannot handle a relational expression with no columns.
     *
     * @param fieldCount Number of fields in the original relational expression
     * @param input Trimmed input
     * @return Dummy project, or null if no dummy is required
     */
    protected TrimResult dummyProject(int fieldCount, RelNode input) {
        final RelOptCluster cluster = input.getCluster();
        final Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, fieldCount, 1);
        if (input.getRowType().getFieldCount() == 1) {
            // Input already has one field (and may in fact be a dummy project we
            // created for the child). We can't do better.
            return result(input, mapping);
        }
        final RexLiteral expr = cluster.getRexBuilder().makeExactLiteral(BigDecimal.ZERO);
        relBuilder.push(input);
        relBuilder.project(ImmutableList.<RexNode>of(expr), ImmutableList.of("DUMMY"));
        return result(relBuilder.build(), mapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link org.apache.calcite.rel.logical.LogicalFilter}.
     */
    public TrimResult trimFields(Filter filter, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final RelDataType rowType = filter.getRowType();
        final int fieldCount = rowType.getFieldCount();
        final RexNode conditionExpr = filter.getCondition();
        final RelNode input = filter.getInput();
        // We use the fields used by the consumer, plus any fields used in the
        // filter.
        final Set<RelDataTypeField> inputExtraFields = new LinkedHashSet<>(extraFields);
        RelOptUtil.InputFinder inputFinder = new RelOptUtil.InputFinder(inputExtraFields);
        inputFinder.inputBitSet.addAll(fieldsUsed);
        conditionExpr.accept(inputFinder);
        final ImmutableBitSet inputFieldsUsed = inputFinder.inputBitSet.build();
        // Create input with trimmed columns.
        TrimResult trimResult = trimChild(filter, input, inputFieldsUsed, inputExtraFields);
        RelNode newInput = trimResult.left;
        final Mapping inputMapping = trimResult.right;
        // If the input is unchanged, and we need to project all columns,
        // there's nothing we can do.
        if (newInput == input && fieldsUsed.cardinality() == fieldCount) {
            return result(filter, Mappings.createIdentity(fieldCount));
        }
        // Build new project expressions, and populate the mapping.
        final RexVisitor<RexNode> shuttle = new RexPermuteInputsShuttle(inputMapping, newInput);
        RexNode newConditionExpr = conditionExpr.accept(shuttle);
        // Use copy rather than relBuilder so that correlating variables get set.
        relBuilder.push(filter.copy(filter.getTraitSet(), newInput, newConditionExpr));
        // The result has the same mapping as the input gave us. Sometimes we
        // return fields that the consumer didn't ask for, because the filter
        // needs them for its condition.
        return result(relBuilder.build(), inputMapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link org.apache.calcite.rel.core.Sort}.
     */
    public TrimResult trimFields(Sort sort, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final RelDataType rowType = sort.getRowType();
        final int fieldCount = rowType.getFieldCount();
        final RelCollation collation = sort.getCollation();
        final RelNode input = sort.getInput();
        // We use the fields used by the consumer, plus any fields used as sort
        // keys.
        final ImmutableBitSet.Builder inputFieldsUsed = fieldsUsed.rebuild();
        for (RelFieldCollation field : collation.getFieldCollations()) {
            inputFieldsUsed.set(field.getFieldIndex());
        }
        // Create input with trimmed columns.
        final Set<RelDataTypeField> inputExtraFields = Collections.emptySet();
        TrimResult trimResult = trimChild(sort, input, inputFieldsUsed.build(), inputExtraFields);
        RelNode newInput = trimResult.left;
        final Mapping inputMapping = trimResult.right;
        // If the input is unchanged, and we need to project all columns,
        // there's nothing we can do.
        if (newInput == input && inputMapping.isIdentity() && fieldsUsed.cardinality() == fieldCount) {
            return result(sort, Mappings.createIdentity(fieldCount));
        }
        // leave the Sort unchanged in case we have dynamic limits
        if (sort.offset instanceof RexDynamicParam || sort.fetch instanceof RexDynamicParam) {
            return result(sort, inputMapping);
        }
        relBuilder.push(newInput);
        final int offset = sort.offset == null ? 0 : RexLiteral.intValue(sort.offset);
        final int fetch = sort.fetch == null ? -1 : RexLiteral.intValue(sort.fetch);
        final ImmutableList<RexNode> fields = relBuilder.fields(RexUtil.apply(inputMapping, collation));
        relBuilder.sortLimit(offset, fetch, fields);
        // The result has the same mapping as the input gave us. Sometimes we
        // return fields that the consumer didn't ask for, because the filter
        // needs them for its condition.
        return result(relBuilder.build(), inputMapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link org.apache.calcite.rel.logical.LogicalJoin}.
     */
    public TrimResult trimFields(Join join, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final int fieldCount = join.getSystemFieldList().size() + join.getLeft().getRowType().getFieldCount() + join.getRight().getRowType().getFieldCount();
        final RexNode conditionExpr = join.getCondition();
        final int systemFieldCount = join.getSystemFieldList().size();
        // Add in fields used in the condition.
        final Set<RelDataTypeField> combinedInputExtraFields = new LinkedHashSet<>(extraFields);
        RelOptUtil.InputFinder inputFinder = new RelOptUtil.InputFinder(combinedInputExtraFields);
        inputFinder.inputBitSet.addAll(fieldsUsed);
        conditionExpr.accept(inputFinder);
        final ImmutableBitSet fieldsUsedPlus = inputFinder.inputBitSet.build();
        // If no system fields are used, we can remove them.
        int systemFieldUsedCount = 0;
        for (int i = 0; i < systemFieldCount; ++i) {
            if (fieldsUsed.get(i)) {
                ++systemFieldUsedCount;
            }
        }
        final int newSystemFieldCount;
        if (systemFieldUsedCount == 0) {
            newSystemFieldCount = 0;
        } else {
            newSystemFieldCount = systemFieldCount;
        }
        int offset = systemFieldCount;
        int changeCount = 0;
        int newFieldCount = newSystemFieldCount;
        final List<RelNode> newInputs = new ArrayList<>(2);
        final List<Mapping> inputMappings = new ArrayList<>();
        final List<Integer> inputExtraFieldCounts = new ArrayList<>();
        for (RelNode input : join.getInputs()) {
            final RelDataType inputRowType = input.getRowType();
            final int inputFieldCount = inputRowType.getFieldCount();
            // Compute required mapping.
            ImmutableBitSet.Builder inputFieldsUsed = ImmutableBitSet.builder();
            for (int bit : fieldsUsedPlus) {
                if (bit >= offset && bit < offset + inputFieldCount) {
                    inputFieldsUsed.set(bit - offset);
                }
            }
            // If there are system fields, we automatically use the
            // corresponding field in each input.
            inputFieldsUsed.set(0, newSystemFieldCount);
            // FIXME: We ought to collect extra fields for each input
            // individually. For now, we assume that just one input has
            // on-demand fields.
            Set<RelDataTypeField> inputExtraFields = RelDataTypeImpl.extra(inputRowType) == null ? Collections.emptySet() : combinedInputExtraFields;
            inputExtraFieldCounts.add(inputExtraFields.size());
            TrimResult trimResult = trimChild(join, input, inputFieldsUsed.build(), inputExtraFields);
            newInputs.add(trimResult.left);
            if (trimResult.left != input) {
                ++changeCount;
            }
            final Mapping inputMapping = trimResult.right;
            inputMappings.add(inputMapping);
            // Move offset to point to start of next input.
            offset += inputFieldCount;
            newFieldCount += inputMapping.getTargetCount() + inputExtraFields.size();
        }
        Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, fieldCount, newFieldCount);
        for (int i = 0; i < newSystemFieldCount; ++i) {
            mapping.set(i, i);
        }
        offset = systemFieldCount;
        int newOffset = newSystemFieldCount;
        for (int i = 0; i < inputMappings.size(); i++) {
            Mapping inputMapping = inputMappings.get(i);
            for (IntPair pair : inputMapping) {
                mapping.set(pair.source + offset, pair.target + newOffset);
            }
            offset += inputMapping.getSourceCount();
            newOffset += inputMapping.getTargetCount() + inputExtraFieldCounts.get(i);
        }
        if (changeCount == 0 && mapping.isIdentity()) {
            return result(join, Mappings.createIdentity(fieldCount));
        }
        // Build new join.
        final RexVisitor<RexNode> shuttle = new RexPermuteInputsShuttle(mapping, newInputs.get(0), newInputs.get(1));
        RexNode newConditionExpr = conditionExpr.accept(shuttle);
        relBuilder.push(newInputs.get(0));
        relBuilder.push(newInputs.get(1));
        if (join instanceof SemiJoin) {
            relBuilder.semiJoin(newConditionExpr);
            // For SemiJoins only map fields from the left-side
            Mapping inputMapping = inputMappings.get(0);
            mapping = Mappings.create(MappingType.INVERSE_SURJECTION, join.getRowType().getFieldCount(), newSystemFieldCount + inputMapping.getTargetCount());
            for (int i = 0; i < newSystemFieldCount; ++i) {
                mapping.set(i, i);
            }
            offset = systemFieldCount;
            newOffset = newSystemFieldCount;
            for (IntPair pair : inputMapping) {
                mapping.set(pair.source + offset, pair.target + newOffset);
            }
        } else {
            relBuilder.join(join.getJoinType(), newConditionExpr);
        }
        return result(relBuilder.build(), mapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link org.apache.calcite.rel.core.SetOp} (including UNION and UNION ALL).
     */
    public TrimResult trimFields(SetOp setOp, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final RelDataType rowType = setOp.getRowType();
        final int fieldCount = rowType.getFieldCount();
        int changeCount = 0;
        // Fennel abhors an empty row type, so pretend that the parent rel
        // wants the last field. (The last field is the least likely to be a
        // system field.)
        if (fieldsUsed.isEmpty()) {
            fieldsUsed = ImmutableBitSet.of(rowType.getFieldCount() - 1);
        }
        // Compute the desired field mapping. Give the consumer the fields they
        // want, in the order that they appear in the bitset.
        final Mapping mapping = createMapping(fieldsUsed, fieldCount);
        // Create input with trimmed columns.
        for (RelNode input : setOp.getInputs()) {
            TrimResult trimResult = trimChild(setOp, input, fieldsUsed, extraFields);
            // We want "mapping", the input gave us "inputMapping", compute
            // "remaining" mapping.
            //    |                   |                |
            //    |---------------- mapping ---------->|
            //    |-- inputMapping -->|                |
            //    |                   |-- remaining -->|
            // 
            // For instance, suppose we have columns [a, b, c, d],
            // the consumer asked for mapping = [b, d],
            // and the transformed input has columns inputMapping = [d, a, b].
            // remaining will permute [b, d] to [d, a, b].
            Mapping remaining = Mappings.divide(mapping, trimResult.right);
            // Create a projection; does nothing if remaining is identity.
            relBuilder.push(trimResult.left);
            relBuilder.permute(remaining);
            if (input != relBuilder.peek()) {
                ++changeCount;
            }
        }
        // If the input is unchanged, and we need to project all columns,
        // there's nothing to do.
        if (changeCount == 0 && mapping.isIdentity()) {
            for (RelNode input : setOp.getInputs()) {
                relBuilder.build();
            }
            return result(setOp, mapping);
        }
        switch(setOp.kind) {
            case UNION:
                relBuilder.union(setOp.all, setOp.getInputs().size());
                break;
            case INTERSECT:
                relBuilder.intersect(setOp.all, setOp.getInputs().size());
                break;
            case EXCEPT:
                assert setOp.getInputs().size() == 2;
                relBuilder.minus(setOp.all);
                break;
            default:
                throw new replacedertionError("unknown setOp " + setOp);
        }
        return result(relBuilder.build(), mapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link org.apache.calcite.rel.logical.LogicalAggregate}.
     */
    public TrimResult trimFields(Aggregate aggregate, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        // Fields:
        // 
        // | sys fields | group fields | indicator fields | agg functions |
        // 
        // Two kinds of trimming:
        // 
        // 1. If agg rel has system fields but none of these are used, create an
        // agg rel with no system fields.
        // 
        // 2. If aggregate functions are not used, remove them.
        // 
        // But group and indicator fields stay, even if they are not used.
        final RelDataType rowType = aggregate.getRowType();
        // Compute which input fields are used.
        // 1. group fields are always used
        final ImmutableBitSet.Builder inputFieldsUsed = aggregate.getGroupSet().rebuild();
        // 2. agg functions
        for (AggregateCall aggCall : aggregate.getAggCallList()) {
            inputFieldsUsed.addAll(aggCall.getArgList());
            if (aggCall.filterArg >= 0) {
                inputFieldsUsed.set(aggCall.filterArg);
            }
            inputFieldsUsed.addAll(RelCollations.ordinals(aggCall.collation));
        }
        // Create input with trimmed columns.
        final RelNode input = aggregate.getInput();
        final Set<RelDataTypeField> inputExtraFields = Collections.emptySet();
        final TrimResult trimResult = trimChild(aggregate, input, inputFieldsUsed.build(), inputExtraFields);
        final RelNode newInput = trimResult.left;
        final Mapping inputMapping = trimResult.right;
        // We have to return group keys and (if present) indicators.
        // So, pretend that the consumer asked for them.
        final int groupCount = aggregate.getGroupSet().cardinality();
        final int indicatorCount = aggregate.getIndicatorCount();
        fieldsUsed = fieldsUsed.union(ImmutableBitSet.range(groupCount + indicatorCount));
        // If the input is unchanged, and we need to project all columns,
        // there's nothing to do.
        if (input == newInput && fieldsUsed.equals(ImmutableBitSet.range(rowType.getFieldCount()))) {
            return result(aggregate, Mappings.createIdentity(rowType.getFieldCount()));
        }
        // Which agg calls are used by our consumer?
        int j = groupCount + indicatorCount;
        int usedAggCallCount = 0;
        for (int i = 0; i < aggregate.getAggCallList().size(); i++) {
            if (fieldsUsed.get(j++)) {
                ++usedAggCallCount;
            }
        }
        // Offset due to the number of system fields having changed.
        Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, rowType.getFieldCount(), groupCount + indicatorCount + usedAggCallCount);
        final ImmutableBitSet newGroupSet = Mappings.apply(inputMapping, aggregate.getGroupSet());
        final ImmutableList<ImmutableBitSet> newGroupSets = ImmutableList.copyOf(Iterables.transform(aggregate.getGroupSets(), input1 -> Mappings.apply(inputMapping, input1)));
        // Populate mapping of where to find the fields. System, group key and
        // indicator fields first.
        for (j = 0; j < groupCount + indicatorCount; j++) {
            mapping.set(j, j);
        }
        // Now create new agg calls, and populate mapping for them.
        relBuilder.push(newInput);
        final List<RelBuilder.AggCall> newAggCallList = new ArrayList<>();
        j = groupCount + indicatorCount;
        for (AggregateCall aggCall : aggregate.getAggCallList()) {
            if (fieldsUsed.get(j)) {
                final ImmutableList<RexNode> args = relBuilder.fields(Mappings.apply2(inputMapping, aggCall.getArgList()));
                final RexNode filterArg = aggCall.filterArg < 0 ? null : relBuilder.field(Mappings.apply(inputMapping, aggCall.filterArg));
                RelBuilder.AggCall newAggCall = relBuilder.aggregateCall(aggCall.getAggregation(), args).distinct(aggCall.isDistinct()).filter(filterArg).approximate(aggCall.isApproximate()).sort(relBuilder.fields(aggCall.collation)).as(aggCall.name);
                mapping.set(j, groupCount + indicatorCount + newAggCallList.size());
                newAggCallList.add(newAggCall);
            }
            ++j;
        }
        final RelBuilder.GroupKey groupKey = relBuilder.groupKey(newGroupSet, newGroupSets);
        relBuilder.aggregate(groupKey, newAggCallList);
        return result(relBuilder.build(), mapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link org.apache.calcite.rel.logical.LogicalTableModify}.
     */
    public TrimResult trimFields(LogicalTableModify modifier, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        // Ignore what consumer wants. We always project all columns.
        Util.discard(fieldsUsed);
        final RelDataType rowType = modifier.getRowType();
        final int fieldCount = rowType.getFieldCount();
        RelNode input = modifier.getInput();
        // We want all fields from the child.
        final int inputFieldCount = input.getRowType().getFieldCount();
        final ImmutableBitSet inputFieldsUsed = ImmutableBitSet.range(inputFieldCount);
        // Create input with trimmed columns.
        final Set<RelDataTypeField> inputExtraFields = Collections.emptySet();
        TrimResult trimResult = trimChild(modifier, input, inputFieldsUsed, inputExtraFields);
        RelNode newInput = trimResult.left;
        final Mapping inputMapping = trimResult.right;
        if (!inputMapping.isIdentity()) {
            // We asked for all fields. Can't believe that the child decided
            // to permute them!
            throw new replacedertionError("Expected idenreplacedy mapping, got " + inputMapping);
        }
        LogicalTableModify newModifier = modifier;
        if (newInput != input) {
            newModifier = modifier.copy(modifier.getTraitSet(), Collections.singletonList(newInput));
        }
        assert newModifier.getClass() == modifier.getClass();
        // Always project all fields.
        Mapping mapping = Mappings.createIdentity(fieldCount);
        return result(newModifier, mapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link org.apache.calcite.rel.logical.LogicalTableFunctionScan}.
     */
    public TrimResult trimFields(LogicalTableFunctionScan tabFun, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final RelDataType rowType = tabFun.getRowType();
        final int fieldCount = rowType.getFieldCount();
        final List<RelNode> newInputs = new ArrayList<>();
        for (RelNode input : tabFun.getInputs()) {
            final int inputFieldCount = input.getRowType().getFieldCount();
            ImmutableBitSet inputFieldsUsed = ImmutableBitSet.range(inputFieldCount);
            // Create input with trimmed columns.
            final Set<RelDataTypeField> inputExtraFields = Collections.emptySet();
            TrimResult trimResult = trimChildRestore(tabFun, input, inputFieldsUsed, inputExtraFields);
            assert trimResult.right.isIdentity();
            newInputs.add(trimResult.left);
        }
        LogicalTableFunctionScan newTabFun = tabFun;
        if (!tabFun.getInputs().equals(newInputs)) {
            newTabFun = tabFun.copy(tabFun.getTraitSet(), newInputs, tabFun.getCall(), tabFun.getElementType(), tabFun.getRowType(), tabFun.getColumnMappings());
        }
        assert newTabFun.getClass() == tabFun.getClass();
        // Always project all fields.
        Mapping mapping = Mappings.createIdentity(fieldCount);
        return result(newTabFun, mapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link org.apache.calcite.rel.logical.LogicalValues}.
     */
    public TrimResult trimFields(LogicalValues values, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final RelDataType rowType = values.getRowType();
        final int fieldCount = rowType.getFieldCount();
        // If they are asking for no fields, we can't give them what they want,
        // because zero-column records are illegal. Give them the last field,
        // which is unlikely to be a system field.
        if (fieldsUsed.isEmpty()) {
            fieldsUsed = ImmutableBitSet.range(fieldCount - 1, fieldCount);
        }
        // If all fields are used, return unchanged.
        if (fieldsUsed.equals(ImmutableBitSet.range(fieldCount))) {
            Mapping mapping = Mappings.createIdentity(fieldCount);
            return result(values, mapping);
        }
        final ImmutableList.Builder<ImmutableList<RexLiteral>> newTuples = ImmutableList.builder();
        for (ImmutableList<RexLiteral> tuple : values.getTuples()) {
            ImmutableList.Builder<RexLiteral> newTuple = ImmutableList.builder();
            for (int field : fieldsUsed) {
                newTuple.add(tuple.get(field));
            }
            newTuples.add(newTuple.build());
        }
        final Mapping mapping = createMapping(fieldsUsed, fieldCount);
        final RelDataType newRowType = RelOptUtil.permute(values.getCluster().getTypeFactory(), rowType, mapping);
        final LogicalValues newValues = LogicalValues.create(values.getCluster(), newRowType, newTuples.build());
        return result(newValues, mapping);
    }

    protected Mapping createMapping(ImmutableBitSet fieldsUsed, int fieldCount) {
        final Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, fieldCount, fieldsUsed.cardinality());
        int i = 0;
        for (int field : fieldsUsed) {
            mapping.set(field, i++);
        }
        return mapping;
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link org.apache.calcite.rel.logical.LogicalTableScan}.
     */
    public TrimResult trimFields(final TableScan tableAccessRel, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final int fieldCount = tableAccessRel.getRowType().getFieldCount();
        if (fieldsUsed.equals(ImmutableBitSet.range(fieldCount)) && extraFields.isEmpty()) {
            // if there is nothing to project or if we are projecting everything
            // then no need to introduce another RelNode
            return trimFields((RelNode) tableAccessRel, fieldsUsed, extraFields);
        }
        final RelNode newTableAccessRel = tableAccessRel.project(fieldsUsed, extraFields, relBuilder);
        // Some parts of the system can't handle rows with zero fields, so
        // pretend that one field is used.
        if (fieldsUsed.cardinality() == 0) {
            RelNode input = newTableAccessRel;
            if (input instanceof Project) {
                // The table has implemented the project in the obvious way - by
                // creating project with 0 fields. Strip it away, and create our own
                // project with one field.
                Project project = (Project) input;
                if (project.getRowType().getFieldCount() == 0) {
                    input = project.getInput();
                }
            }
            return dummyProject(fieldCount, input);
        }
        final Mapping mapping = createMapping(fieldsUsed, fieldCount);
        return result(newTableAccessRel, mapping);
    }

    // ~ Inner Classes ----------------------------------------------------------
    /**
     * Result of an attempt to trim columns from a relational expression.
     *
     * <p>The mapping describes where to find the columns wanted by the parent
     * of the current relational expression.
     *
     * <p>The mapping is a
     * {@link org.apache.calcite.util.mapping.Mappings.SourceMapping}, which means
     * that no column can be used more than once, and some columns are not used.
     * {@code columnsUsed.getSource(i)} returns the source of the i'th output
     * field.
     *
     * <p>For example, consider the mapping for a relational expression that
     * has 4 output columns but only two are being used. The mapping
     * {2 → 1, 3 → 0} would give the following behavior:
     *
     * <ul>
     * <li>columnsUsed.getSourceCount() returns 4
     * <li>columnsUsed.getTargetCount() returns 2
     * <li>columnsUsed.getSource(0) returns 3
     * <li>columnsUsed.getSource(1) returns 2
     * <li>columnsUsed.getSource(2) throws IndexOutOfBounds
     * <li>columnsUsed.getTargetOpt(3) returns 0
     * <li>columnsUsed.getTargetOpt(0) returns -1
     * </ul>
     */
    protected static class TrimResult extends Pair<RelNode, Mapping> {

        /**
         * Creates a TrimResult.
         *
         * @param left  New relational expression
         * @param right Mapping of fields onto original fields
         */
        public TrimResult(RelNode left, Mapping right) {
            super(left, right);
            assert right.getTargetCount() == left.getRowType().getFieldCount() : "rowType: " + left.getRowType() + ", mapping: " + right;
        }
    }
}
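
A hedged usage sketch, not from the lealone source: trimming is driven through the public trim method. The root and relBuilder variables below are hypothetical; passing null for the validator works because the constructor discards it, as the DremioFieldTrimmer example further down also does.

// Hypothetical fragment: drop unused columns from the plan rooted at `root`.
RelFieldTrimmer trimmer = new RelFieldTrimmer(null, relBuilder);
RelNode trimmed = trimmer.trim(root);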

Source: SubQueryRemoveRule.java
with Apache License 2.0
from lealone

protected RexNode apply(RexSubQuery e, Set<CorrelationId> variablesSet, RelOptUtil.Logic logic, RelBuilder builder, int inputCount, int offset) {
    switch(e.getKind()) {
        case SCALAR_QUERY:
            return rewriteScalarQuery(e, variablesSet, builder, inputCount, offset);
        case SOME:
            return rewriteSome(e, builder);
        case IN:
            return rewriteIn(e, variablesSet, logic, builder, offset);
        case EXISTS:
            return rewriteExists(e, variablesSet, logic, builder);
        default:
            throw new AssertionError(e.getKind());
    }
}

Source: SubQueryRemoveRule.java
with Apache License 2.0
from lealone

/**
 * Returns a reference to a particular field, by offset, across several
 * inputs on a {@link RelBuilder}'s stack.
 */
private RexInputRef field(RelBuilder builder, int inputCount, int offset) {
    for (int inputOrdinal = 0; ; ) {
        final RelNode r = builder.peek(inputCount, inputOrdinal);
        if (offset < r.getRowType().getFieldCount()) {
            return builder.field(inputCount, inputOrdinal, offset);
        }
        ++inputOrdinal;
        offset -= r.getRowType().getFieldCount();
    }
}

Source: SubQueryRemoveRule.java
with Apache License 2.0
from lealone

/**
 * Returns a list of expressions that project the first {@code fieldCount}
 * fields of the top input on a {@link RelBuilder}'s stack.
 */
private static List<RexNode> fields(RelBuilder builder, int fieldCount) {
    final List<RexNode> projects = new ArrayList<>();
    for (int i = 0; i < fieldCount; i++) {
        projects.add(builder.field(i));
    }
    return projects;
}
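
A hedged note, not from the source: both helpers above rely on RelBuilder's stack addressing, where builder.field(i) resolves ordinal i against the node on top of the stack and builder.peek(inputCount, inputOrdinal) reaches further down. The fragment below is a hypothetical illustration of how fields(...) composes with project, assuming the builder already has a node on its stack and the helper is visible to the caller.

// Hypothetical fragment: re-project every column of the node on top of the stack.
builder.project(fields(builder, builder.peek().getRowType().getFieldCount()));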

Source: ExtendedAggregateExtractProjectRule.java
with Apache License 2.0
from flink-tpc-ds

/**
 * Extract a project from the input aggregate and return a new aggregate.
 */
private RelNode performExtractForAggregate(Aggregate aggregate, RelNode input, RelBuilder relBuilder) {
    Mapping mapping = extractProjectsAndMapping(aggregate, input, relBuilder);
    return getNewAggregate(aggregate, relBuilder, mapping);
}

Source: ExtendedAggregateExtractProjectRule.java
with Apache License 2.0
from flink-tpc-ds

/**
 * Extract a project from the input table aggregate and return a new table aggregate.
 */
private RelNode performExtractForTableAggregate(TableAggregate aggregate, RelNode input, RelBuilder relBuilder) {
    RelNode newAggregate = performExtractForAggregate(aggregate.getCorrespondingAggregate(), input, relBuilder);
    if (aggregate instanceof LogicalTableAggregate) {
        return LogicalTableAggregate.create((Aggregate) newAggregate);
    } else {
        return LogicalWindowTableAggregate.create((LogicalWindowAggregate) newAggregate);
    }
}

Source: ExtendedAggregateExtractProjectRule.java
with Apache License 2.0
from flink-tpc-ds

@Override
public void onMatch(RelOptRuleCall call) {
    final RelNode relNode = call.rel(0);
    final RelNode input = call.rel(1);
    final RelBuilder relBuilder = call.builder().push(input);
    if (relNode instanceof Aggregate) {
        call.transformTo(performExtractForAggregate((Aggregate) relNode, input, relBuilder));
    } else if (relNode instanceof TableAggregate) {
        call.transformTo(performExtractForTableAggregate((TableAggregate) relNode, input, relBuilder));
    }
}

Source: DremioFieldTrimmer.java
with Apache License 2.0
from dremio

public class DremioFieldTrimmer extends RelFieldTrimmer {

    private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DremioFieldTrimmer.class);

    private final RelBuilder builder;

    private final boolean isRelPlanning;

    public static DremioFieldTrimmer of(RelOptCluster cluster, boolean isRelPlanning) {
        RelBuilder builder = DremioRelFactories.CALCITE_LOGICAL_BUILDER.create(cluster, null);
        return new DremioFieldTrimmer(builder, isRelPlanning);
    }

    public static DremioFieldTrimmer of(RelBuilder builder) {
        return new DremioFieldTrimmer(builder, false);
    }

    private DremioFieldTrimmer(RelBuilder builder, boolean isRelPlanning) {
        super(null, builder);
        this.builder = builder;
        this.isRelPlanning = isRelPlanning;
    }

    // Override this method so that the CorrelVariable has an updated field list after trimming the subtree under a correlate rel
    @Override
    protected TrimResult result(RelNode r, final Mapping mapping) {
        final RexBuilder rexBuilder = builder.getRexBuilder();
        for (final CorrelationId correlation : r.getVariablesSet()) {
            r = r.accept(new CorrelationReferenceFinder() {

                protected RexNode handle(RexFieldAccess fieldAccess) {
                    final RexCorrelVariable v = (RexCorrelVariable) fieldAccess.getReferenceExpr();
                    if (v.id.equals(correlation)) {
                        final int old = fieldAccess.getField().getIndex();
                        final int new_ = mapping.getTarget(old);
                        final RelDataTypeFactory.Builder typeBuilder = builder.getTypeFactory().builder();
                        for (IntPair pair : mapping) {
                            if (pair.source < v.getType().getFieldCount()) {
                                typeBuilder.add(v.getType().getFieldList().get(pair.source));
                            }
                        }
                        final RexNode newV = rexBuilder.makeCorrel(typeBuilder.build(), v.id);
                        if (old != new_) {
                            return rexBuilder.makeFieldAccess(newV, new_);
                        }
                    }
                    return fieldAccess;
                }
            });
        }
        return new TrimResult(r, mapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for {@link Aggregate}.
     */
    @Override
    public TrimResult trimFields(Aggregate aggregate, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        int fieldCount = aggregate.getRowType().getFieldCount();
        if (fieldCount == 0) {
            // If the input has no fields, we cannot trim anything.
            return new TrimResult(aggregate, Mappings.createIdentity(fieldCount));
        }
        return super.trimFields(aggregate, fieldsUsed, extraFields);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for {@link com.dremio.exec.planner.logical.CorrelateRel}.
     */
    public TrimResult trimFields(Correlate correlate, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final RelDataType rowType = correlate.getRowType();
        final int fieldCount = rowType.getFieldCount();
        final ImmutableBitSet.Builder inputBitSet = ImmutableBitSet.builder();
        final List<RelNode> newInputs = new ArrayList<>(2);
        final RelMetadataQuery mq = correlate.getCluster().getMetadataQuery();
        final List<Mapping> inputMappings = new ArrayList<>();
        int newFieldCount = 0;
        // add all used fields including columns used by correlation
        inputBitSet.addAll(fieldsUsed);
        int changeCount = 0;
        inputBitSet.addAll(correlate.getRequiredColumns());
        int leftFieldCount = correlate.getLeft().getRowType().getFieldCount();
        int rightfieldCount = correlate.getRight().getRowType().getFieldCount();
        for (int i = leftFieldCount; i < leftFieldCount + rightfieldCount; i++) {
            inputBitSet.set(i);
        }
        final ImmutableBitSet fieldsUsedPlus = inputBitSet.build();
        // trim left and right by setting up ImmutableBitSet for each input
        // and add mappings from the result
        int offset = 0;
        for (RelNode input : correlate.getInputs()) {
            final RelDataType inputRowCount = input.getRowType();
            final int inputFieldCount = inputRowCount.getFieldCount();
            ImmutableBitSet.Builder inputFieldsUsed = ImmutableBitSet.builder();
            for (int bit : fieldsUsedPlus) {
                if (bit >= offset && bit < offset + inputFieldCount) {
                    inputFieldsUsed.set(bit - offset);
                }
            }
            // add collation
            final ImmutableList<RelCollation> collations = mq.collations(input);
            for (RelCollation collation : collations) {
                for (RelFieldCollation fieldCollation : collation.getFieldCollations()) {
                    inputFieldsUsed.set(fieldCollation.getFieldIndex());
                }
            }
            TrimResult trimResult = dispatchTrimFields(input, inputFieldsUsed.build(), extraFields);
            newInputs.add(trimResult.left);
            if (trimResult.left != input) {
                ++changeCount;
            }
            final Mapping inputMapping = trimResult.right;
            inputMappings.add(inputMapping);
            offset += inputFieldCount;
            newFieldCount += inputMapping.getTargetCount();
        }
        Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, fieldCount, newFieldCount);
        offset = 0;
        int newOffset = 0;
        // build mappings for this correlate rel using trim results for its children
        for (int i = 0; i < inputMappings.size(); i++) {
            Mapping inputMapping = inputMappings.get(i);
            for (IntPair pair : inputMapping) {
                mapping.set(pair.source + offset, pair.target + newOffset);
            }
            offset += inputMapping.getSourceCount();
            newOffset += inputMapping.getTargetCount();
        }
        if (changeCount == 0 && mapping.isIdentity()) {
            // no remapping required, return original one
            return result(correlate, Mappings.createIdentity(fieldCount));
        }
        // build a new correlate rel
        builder.push(newInputs.get(0));
        builder.push(newInputs.get(1));
        final ImmutableBitSet.Builder newRequiredField = ImmutableBitSet.builder();
        for (int bit : correlate.getRequiredColumns()) {
            newRequiredField.set(inputMappings.get(0).getTarget(bit));
        }
        List<RexNode> requiredNodes = newRequiredField.build().asList().stream().map(ord -> builder.getRexBuilder().makeInputRef(correlate, ord)).collect(Collectors.toList());
        builder.correlate(correlate.getJoinType(), correlate.getCorrelationId(), requiredNodes);
        RelNode newCorrelate = builder.build();
        for (final CorrelationId correlation : newCorrelate.getVariablesSet()) {
            newCorrelate.accept(new CorrelationReferenceFinder() {

                protected RexNode handle(RexFieldAccess fieldAccess) {
                    int ind = fieldAccess.getField().getIndex();
                    final RexCorrelVariable v = (RexCorrelVariable) fieldAccess.getReferenceExpr();
                    if (v.id.equals(correlation)) {
                        RelDataTypeField field = fieldAccess.getField();
                    }
                    return fieldAccess;
                }
            });
        }
        return result(newCorrelate, mapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for {@link SampleCrel}.
     */
    public TrimResult trimFields(SampleCrel sampleCrel, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        TrimResult result = dispatchTrimFields(sampleCrel.getInput(), fieldsUsed, extraFields);
        return result(SampleCrel.create(result.left), result.right);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for {@link ScanCrel}.
     */
    @SuppressWarnings("unused")
    public TrimResult trimFields(ScanCrel crel, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        if (fieldsUsed.cardinality() == crel.getRowType().getFieldCount()) {
            return result(crel, Mappings.createIdentity(crel.getRowType().getFieldCount()));
        }
        if (fieldsUsed.cardinality() == 0) {
            // do something similar to a dummy project but avoid using a scan field. This ensures the scan
            // does a skipAll operation rather than projecting a useless column.
            final RelOptCluster cluster = crel.getCluster();
            final Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, crel.getRowType().getFieldCount(), 1);
            final RexLiteral expr = cluster.getRexBuilder().makeExactLiteral(BigDecimal.ZERO);
            builder.push(crel);
            builder.project(ImmutableList.<RexNode>of(expr), ImmutableList.of("DUMMY"));
            return result(builder.build(), mapping);
        }
        final List<SchemaPath> paths = new ArrayList<>();
        final Mapping m = Mappings.create(MappingType.PARTIAL_FUNCTION, crel.getRowType().getFieldCount(), fieldsUsed.cardinality());
        int index = 0;
        for (int i : fieldsUsed) {
            paths.add(SchemaPath.getSimplePath(crel.getRowType().getFieldList().get(i).getName()));
            m.set(i, index);
            index++;
        }
        ScanCrel newCrel = crel.cloneWithProject(paths);
        return result(newCrel, m);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for {@link FilesystemScanDrel}.
     */
    @SuppressWarnings("unused")
    public TrimResult trimFields(FilesystemScanDrel drel, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        // if we've already pushed down projection of nested columns, we don't want to trim anymore
        if (drel.getProjectedColumns().stream().anyMatch(c -> !c.isSimplePath())) {
            return result(drel, Mappings.createIdentity(drel.getRowType().getFieldCount()));
        }
        ImmutableBitSet.Builder fieldBuilder = fieldsUsed.rebuild();
        Pointer<Boolean> failed = new Pointer<>(false);
        if (drel.getFilter() != null) {
            drel.getFilter().getConditions().forEach(c -> {
                SchemaPath path = c.getPath();
                String pathString = path.getAsUnescapedPath();
                RelDataTypeField field = drel.getRowType().getField(pathString, false, false);
                if (field != null) {
                    fieldBuilder.set(field.getIndex());
                } else {
                    failed.value = true;
                }
            });
        }
        if (failed.value) {
            return result(drel, Mappings.createIdentity(drel.getRowType().getFieldCount()));
        }
        final PruneFilterCondition pruneFilterCondition = drel.getPartitionFilter();
        if (pruneFilterCondition != null && pruneFilterCondition.getPartitionExpression() != null) {
            pruneFilterCondition.getPartitionExpression().accept(new RexShuttle() {

                @Override
                public RexNode visitInputRef(final RexInputRef inputRef) {
                    fieldBuilder.set(inputRef.getIndex());
                    return super.visitInputRef(inputRef);
                }
            });
        }
        fieldsUsed = fieldBuilder.build();
        if (fieldsUsed.cardinality() == drel.getRowType().getFieldCount()) {
            return result(drel, Mappings.createIdentity(drel.getRowType().getFieldCount()));
        }
        if (fieldsUsed.cardinality() == 0) {
            // do something similar to a dummy project but avoid using a scan field. This ensures the scan
            // does a skipAll operation rather than projecting a useless column.
            final RelOptCluster cluster = drel.getCluster();
            final Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, drel.getRowType().getFieldCount(), 1);
            final RexLiteral expr = cluster.getRexBuilder().makeExactLiteral(BigDecimal.ZERO);
            builder.push(drel);
            builder.project(ImmutableList.<RexNode>of(expr), ImmutableList.of("DUMMY"));
            return result(builder.build(), mapping);
        }
        final List<SchemaPath> paths = new ArrayList<>();
        final Mapping m = Mappings.create(MappingType.PARTIAL_FUNCTION, drel.getRowType().getFieldCount(), fieldsUsed.cardinality());
        int index = 0;
        for (int i : fieldsUsed) {
            paths.add(SchemaPath.getSimplePath(drel.getRowType().getFieldList().get(i).getName()));
            m.set(i, index);
            index++;
        }
        FilesystemScanDrel newDrel = drel.cloneWithProject(paths, false);
        return result(newDrel, m);
    }

    // Overridden until CALCITE-2260 is fixed.
    @Override
    public TrimResult trimFields(SetOp setOp, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        if (!setOp.all) {
            return super.trimFields(setOp, ImmutableBitSet.range(setOp.getRowType().getFieldCount()), extraFields);
        }
        return super.trimFields(setOp, fieldsUsed, extraFields);
    }

    @Override
    public TrimResult trimFields(Project project, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        int count = FlattenVisitors.count(project.getProjects());
        // if there are no flattens, trimming is fine.
        TrimResult result = super.trimFields(project, fieldsUsed, extraFields);
        // in rel planning mode, removing the top project would generate a wrong SQL query,
        // so make sure to keep a top project after trimming
        if (isRelPlanning && !(result.left instanceof Project)) {
            List<RexNode> identityProject = new ArrayList<>();
            for (int i = 0; i < result.left.getRowType().getFieldCount(); i++) {
                identityProject.add(new RexInputRef(i, result.left.getRowType().getFieldList().get(i).getType()));
            }
            builder.push(result.left);
            result = result(builder.project(identityProject, result.left.getRowType().getFieldNames(), true).build(), result.right);
        }
        if (count == 0) {
            return result;
        }
        // start by trimming based on super.
        if (result.left.getRowType().getFieldCount() != fieldsUsed.cardinality()) {
            // we got a partial trim, which we don't handle. Skip the optimization.
            return result(project, Mappings.createIdentity(project.getRowType().getFieldCount()));
        }
        final RelNode resultRel = result.left;
        final Mapping finalMapping = result.right;
        if (resultRel instanceof Project && FlattenVisitors.count(((Project) resultRel).getProjects()) == count) {
            // flatten count didn't change.
            return result;
        }
        /*
         * Flatten count changed. To solve, we'll actually increase the required fields to include the
         * flattens and then put another project on top that drops the extra fields, returning the
         * previously generated mapping.
         */
        ImmutableBitSet.Builder flattenColumnsBuilder = ImmutableBitSet.builder();
        {
            int i = 0;
            for (RexNode n : project.getProjects()) {
                try {
                    if (fieldsUsed.get(i)) {
                        continue;
                    }
                    if (!FlattenVisitors.hasFlatten(n)) {
                        continue;
                    }
                    // we have a flatten in an unused field.
                    flattenColumnsBuilder.set(i);
                } finally {
                    i++;
                }
            }
        }
        ImmutableBitSet unreferencedFlattenProjects = flattenColumnsBuilder.build();
        if (unreferencedFlattenProjects.isEmpty()) {
            // this should be impossible; fall back to the base case (no column trim), which just means we optimize less.
            logger.info("Failure while trying to trim flatten expression. Expressions {}, Columns to trim to: {}", project.getProjects(), fieldsUsed);
            return result(project, Mappings.createIdentity(project.getRowType().getFieldCount()));
        }
        final ImmutableBitSet fieldsIncludingFlattens = fieldsUsed.union(unreferencedFlattenProjects);
        final TrimResult result2 = super.trimFields(project, fieldsIncludingFlattens, extraFields);
        List<RexNode> finalProj = new ArrayList<>();
        builder.push(result2.left);
        int i = 0;
        for (int index : fieldsIncludingFlattens) {
            if (fieldsUsed.get(index)) {
                finalProj.add(builder.field(i));
            }
            i++;
        }
        // drop the flatten columns in a subsequent projection.
        return result(builder.project(finalProj).build(), finalMapping);
    }

    public TrimResult trimFields(LimitRel limit, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        final RelDataType rowType = limit.getRowType();
        final int fieldCount = rowType.getFieldCount();
        final RelNode input = limit.getInput();
        final Set<RelDataTypeField> inputExtraFields = Collections.emptySet();
        TrimResult trimResult = trimChild(limit, input, fieldsUsed, inputExtraFields);
        RelNode newInput = trimResult.left;
        final Mapping inputMapping = trimResult.right;
        if (newInput == input && inputMapping.isIdentity() && fieldsUsed.cardinality() == fieldCount) {
            return result(limit, Mappings.createIdentity(fieldCount));
        }
        return result(limit.copy(newInput.getTraitSet(), ImmutableList.of(newInput)), inputMapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for {@link MultiJoin}.
     */
    @SuppressWarnings("unused")
    public TrimResult trimFields(MultiJoin multiJoin, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        // unlike #trimFields in RelFieldTrimmer
        Util.discard(extraFields);
        final List<RelNode> originalInputs = multiJoin.getInputs();
        final RexNode originalJoinFilter = multiJoin.getJoinFilter();
        final List<RexNode> originalOuterJoinConditions = multiJoin.getOuterJoinConditions();
        final RexNode originalPostJoinFilter = multiJoin.getPostJoinFilter();
        int fieldCount = 0;
        for (RelNode input : originalInputs) {
            fieldCount += input.getRowType().getFieldCount();
        }
        // add in the fields used by all the conditions, including the ones requested in "fieldsUsed"
        final RelOptUtil.InputFinder inputFinder = new RelOptUtil.InputFinder();
        inputFinder.inputBitSet.addAll(fieldsUsed);
        originalJoinFilter.accept(inputFinder);
        originalOuterJoinConditions.forEach(rexNode -> {
            if (rexNode != null) {
                rexNode.accept(inputFinder);
            }
        });
        if (originalPostJoinFilter != null) {
            originalPostJoinFilter.accept(inputFinder);
        }
        final ImmutableBitSet fieldsUsedPlus = inputFinder.inputBitSet.build();
        int offset = 0;
        int changeCount = 0;
        int newFieldCount = 0;
        final List<RelNode> newInputs = Lists.newArrayListWithExpectedSize(originalInputs.size());
        final List<Mapping> inputMappings = Lists.newArrayList();
        for (RelNode input : originalInputs) {
            final RelDataType inputRowType = input.getRowType();
            final int inputFieldCount = inputRowType.getFieldCount();
            // compute required mapping
            final ImmutableBitSet.Builder inputFieldsUsed = ImmutableBitSet.builder();
            for (int bit : fieldsUsedPlus) {
                if (bit >= offset && bit < offset + inputFieldCount) {
                    inputFieldsUsed.set(bit - offset);
                }
            }
            final TrimResult trimResult = trimChild(multiJoin, input, inputFieldsUsed.build(), Collections.emptySet());
            newInputs.add(trimResult.left);
            // noinspection ObjectEquality
            if (trimResult.left != input) {
                ++changeCount;
            }
            final Mapping inputMapping = trimResult.right;
            inputMappings.add(inputMapping);
            // move offset to point to start of next input
            offset += inputFieldCount;
            newFieldCount += inputMapping.getTargetCount();
        }
        final Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, fieldCount, newFieldCount);
        offset = 0;
        int newOffset = 0;
        for (final Mapping inputMapping : inputMappings) {
            for (final IntPair pair : inputMapping) {
                mapping.set(pair.source + offset, pair.target + newOffset);
            }
            offset += inputMapping.getSourceCount();
            newOffset += inputMapping.getTargetCount();
        }
        if (changeCount == 0 && mapping.isIdentity()) {
            return result(multiJoin, Mappings.createIdentity(fieldCount));
        }
        // build new MultiJoin
        final RexVisitor<RexNode> inputFieldPermuter = new RexPermuteInputsShuttle(mapping, newInputs.toArray(new RelNode[0]));
        final RexNode newJoinFilter = originalJoinFilter.accept(inputFieldPermuter);
        // row type is simply re-mapped
        final List<RelDataTypeField> originalFieldList = multiJoin.getRowType().getFieldList();
        final RelDataType newRowType = new RelRecordType(StreamSupport.stream(mapping.spliterator(), false).map(pair -> pair.source).map(originalFieldList::get).map(originalField -> new RelDataTypeFieldImpl(originalField.getName(), mapping.getTarget(originalField.getIndex()), originalField.getType())).collect(Collectors.toList()));
        final List<RexNode> newOuterJoinConditions = originalOuterJoinConditions.stream().map(expr -> expr == null ? null : expr.accept(inputFieldPermuter)).collect(Collectors.toList());
        // see MultiJoin#getProjFields; ideally all input fields must be used, and this is a list of "nulls"
        final List<ImmutableBitSet> newProjFields = Lists.newArrayList();
        for (final Ord<Mapping> inputMapping : Ord.zip(inputMappings)) {
            if (multiJoin.getProjFields().get(inputMapping.i) == null) {
                newProjFields.add(null);
                continue;
            }
            ImmutableBitSet.Builder projBuilder = ImmutableBitSet.builder();
            for (final IntPair pair : inputMapping.e) {
                if (multiJoin.getProjFields().get(inputMapping.i).get(pair.source)) {
                    projBuilder.set(pair.target);
                }
            }
            newProjFields.add(projBuilder.build());
        }
        final ImmutableMap<Integer, ImmutableIntList> newJoinFieldRefCountsMap = computeJoinFieldRefCounts(newInputs, newFieldCount, newJoinFilter);
        final RexNode newPostJoinFilter = originalPostJoinFilter == null ? null : originalPostJoinFilter.accept(inputFieldPermuter);
        final MultiJoin newMultiJoin = new MultiJoin(multiJoin.getCluster(), newInputs, newJoinFilter, newRowType, multiJoin.isFullOuterJoin(), newOuterJoinConditions, multiJoin.getJoinTypes(), newProjFields, newJoinFieldRefCountsMap, newPostJoinFilter);
        return result(newMultiJoin, mapping);
    }

    /**
     * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
     * {@link com.dremio.exec.planner.logical.JoinRel}.
     * This sets an ImmutableBitSet on the JoinRel indicating which of the join's fields are used by its consumer.
     */
    public TrimResult trimFields(JoinRel join, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        if (fieldsUsed.cardinality() == 0) {
            fieldsUsed = fieldsUsed.set(0);
        }
        TrimResult result = super.trimFields(join, fieldsUsed, extraFields);
        Join rel = (Join) result.left;
        Mapping mapping = result.right;
        ImmutableBitSet projectedFields = ImmutableBitSet.of(fieldsUsed.asList().stream().map(mapping::getTarget).collect(Collectors.toList()));
        RelNode newJoin = JoinRel.create(rel.getCluster(), rel.getTraitSet(), rel.getLeft(), rel.getRight(), rel.getCondition(), rel.getJoinType(), projectedFields);
        final Mapping map = Mappings.create(MappingType.INVERSE_SURJECTION, join.getRowType().getFieldCount(), newJoin.getRowType().getFieldCount());
        int j = 0;
        for (int i = 0; i < join.getRowType().getFieldCount(); i++) {
            if (fieldsUsed.get(i)) {
                map.set(i, j);
                j++;
            }
        }
        return result(newJoin, map);
    }

    public TrimResult trimFields(LogicalWindow window, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
        // Fields:
        // 
        // Window rowtype
        // | input fields | agg functions |
        // 
        // Rowtype used internally by the Window operator for groups:
        // | input fields | constants (held by the Window node) |
        // 
        // Input rowtype:
        // | input fields |
        // 
        // Trimming operations:
        // 1. If an agg is not used, don't include it in the group
        // 2. If a group is not used, remove the group
        // 3. If no group, skip the operator
        // 4. If a constant is not used, don't include it in the new window operator
        // 
        // General description of the algorithm:
        // 1. Identify input fields and constants in use
        // - input fields directly used by caller
        // - input fields used by agg call, if agg call used by caller
        // - input fields used by window group if at least one agg from the group is used by caller
        // - constants used by the agg call, if the agg call is used by the caller
        // 2. Trim input
        // - only use list of used input fields (do not include constants) when calling for trimChild
        // as it will confuse callee.
        // 3. Create new operator and final mapping
        // - create a mapping combining input mapping and constants in use to rewrite expressions
        // - if no agg is actually used by caller, return early with the new input and a copy of the
        // input mapping matching the number of fields (skip the current operator)
        // - Go over each group/agg call used by caller, and rewrite them by visiting them with the
        // mapping combining input and constants
        // - create a new window operator and return operator and mapping to caller
        final RelDataType rowType = window.getRowType();
        final int fieldCount = rowType.getFieldCount();
        final RelNode input = window.getInput();
        final int inputFieldCount = input.getRowType().getFieldCount();
        final Set<RelDataTypeField> inputExtraFields = new LinkedHashSet<>(extraFields);
        final RelOptUtil.InputFinder inputFinder = new RelOptUtil.InputFinder(inputExtraFields);
        // 
        // 1. Identify input fields and constants in use
        // 
        for (Integer bit : fieldsUsed) {
            if (bit >= inputFieldCount) {
                // exit if it goes over the input fields
                break;
            }
            inputFinder.inputBitSet.set(bit);
        }
        // number of agg calls and agg calls actually used
        int aggCalls = 0;
        int aggCallsUsed = 0;
        // Capture which input fields and constants are used by the agg calls
        // thanks to the visitor
        for (final Window.Group group : window.groups) {
            boolean groupUsed = false;
            for (final RexWinAggCall aggCall : group.aggCalls) {
                int offset = inputFieldCount + aggCalls;
                aggCalls++;
                if (!fieldsUsed.get(offset)) {
                    continue;
                }
                aggCallsUsed++;
                groupUsed = true;
                aggCall.accept(inputFinder);
            }
            // If no agg from the group is being used, do not include the group fields
            if (!groupUsed) {
                continue;
            }
            group.lowerBound.accept(inputFinder);
            group.upperBound.accept(inputFinder);
            // Add partition fields
            inputFinder.inputBitSet.addAll(group.keys);
            // Add collation fields
            for (RelFieldCollation fieldCollation : group.collation().getFieldCollations()) {
                inputFinder.inputBitSet.set(fieldCollation.getFieldIndex());
            }
        }
        // Create the final bitset containing both input and constants used
        final ImmutableBitSet inputAndConstantsFieldsUsed = inputFinder.inputBitSet.build();
        // 
        // 2. Trim input
        // 
        // Create input with trimmed columns. Need a bitset containing only input fields
        final ImmutableBitSet inputFieldsUsed = inputAndConstantsFieldsUsed.intersect(ImmutableBitSet.range(inputFieldCount));
        final int constantsUsed = inputAndConstantsFieldsUsed.cardinality() - inputFieldsUsed.cardinality();
        final TrimResult trimResult = trimChild(window, input, inputFieldsUsed, inputExtraFields);
        RelNode newInput = trimResult.left;
        Mapping inputMapping = trimResult.right;
        // If the input is unchanged, and we need to project all columns,
        // there's nothing we can do.
        if (newInput == input && fieldsUsed.cardinality() == fieldCount) {
            return result(window, Mappings.createIdenreplacedy(fieldCount));
        }
        // 
        // 3. Create new operator and final mapping
        // 
        // if new input cardinality is 0, create a dummy project
        // Note that the returned mapping is not a valid INVERSE_SURJECTION mapping
        // as it does not include a source for each target!
        if (inputFieldsUsed.cardinality() == 0) {
            final TrimResult dummyResult = dummyProject(inputFieldCount, newInput);
            newInput = dummyResult.left;
            inputMapping = dummyResult.right;
        }
        // Create a new input mapping which includes constants
        final int newInputFieldCount = newInput.getRowType().getFieldCount();
        final Mapping inputAndConstantsMapping = Mappings.create(MappingType.INVERSE_SURJECTION, inputFieldCount + window.constants.size(), newInputFieldCount + constantsUsed);
        // include the input mapping
        inputMapping.forEach(pair -> inputAndConstantsMapping.set(pair.source, pair.target));
        // Add constant mapping (while trimming list of constants)
        final List<RexLiteral> newConstants = new ArrayList<>();
        for (int i = 0; i < window.constants.size(); i++) {
            int index = inputFieldCount + i;
            if (inputAndConstantsFieldsUsed.get(index)) {
                inputAndConstantsMapping.set(index, newInputFieldCount + newConstants.size());
                newConstants.add(window.constants.get(i));
            }
        }
        // Create a new mapping. Include all the input fields
        final Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, fieldCount, newInputFieldCount + aggCallsUsed);
        // Remap the field projects (1:1 remap)
        inputMapping.forEach(pair -> mapping.set(pair.source, pair.target));
        // Degenerate case: no agg calls are being used, so skip this operator!
        if (aggCallsUsed == 0) {
            return result(newInput, mapping);
        }
        // Create/rewrite a new window operator by dropping unnecessary groups/agg calls
        // and permuting the inputs
        final RexPermuteInputsShuttle shuttle = new RexPermuteInputsShuttle(inputAndConstantsMapping, newInput);
        final List<Window.Group> newGroups = new ArrayList<>();
        int oldOffset = inputFieldCount;
        int newOffset = newInputFieldCount;
        for (final Window.Group group : window.groups) {
            final List<RexWinAggCall> newCalls = new ArrayList<>();
            for (final RexWinAggCall agg : group.aggCalls) {
                if (!fieldsUsed.get(oldOffset)) {
                    // Skip unused aggs
                    oldOffset++;
                    continue;
                }
                RexWinAggCall newCall = permuteWinAggCall(agg, shuttle, newCalls.size());
                newCalls.add(newCall);
                mapping.set(oldOffset, newOffset);
                oldOffset++;
                newOffset++;
            }
            // If no agg from the group, let's skip the group
            if (newCalls.isEmpty()) {
                continue;
            }
            final RexWindowBound newLowerBound = group.lowerBound.accept(shuttle);
            final RexWindowBound newUpperBound = group.upperBound.accept(shuttle);
            // Remap partition fields
            final ImmutableBitSet newKeys = Mappings.apply(inputAndConstantsMapping, group.keys);
            // Remap collation fields
            final List<RelFieldCollation> newFieldCollations = new ArrayList<>();
            for (RelFieldCollation fieldCollation : group.collation().getFieldCollations()) {
                newFieldCollations.add(fieldCollation.copy(inputAndConstantsMapping.getTarget(fieldCollation.getFieldIndex())));
            }
            final Window.Group newGroup = new Window.Group(newKeys, group.isRows, newLowerBound, newUpperBound, RelCollations.of(newFieldCollations), newCalls);
            newGroups.add(newGroup);
        }
        final Mapping permutationMapping;
        // If no input column is being used, we still need to include the dummy column in the row type
        // by temporarily adding it to the mapping
        if (inputFieldsUsed.cardinality() != 0) {
            permutationMapping = mapping;
        } else {
            permutationMapping = Mappings.create(MappingType.INVERSE_SURJECTION, mapping.getSourceCount(), mapping.getTargetCount());
            mapping.forEach(pair -> permutationMapping.set(pair.source, pair.target));
            // set a fake mapping for the dummy project
            permutationMapping.set(0, 0);
        }
        final RelDataType newRowType = RelOptUtil.permute(window.getCluster().getTypeFactory(), rowType, permutationMapping);
        // TODO: should there be a relbuilder for window?
        final LogicalWindow newWindow = LogicalWindow.create(window.getTraitSet(), newInput, newConstants, newRowType, newGroups);
        return result(newWindow, mapping);
    }

    private static RexWinAggCall permuteWinAggCall(RexWinAggCall call, RexPermuteInputsShuttle shuttle, int newOrdinal) {
        // Cast should be safe, as the shuttle creates a new RexCall instance (but not a RexWinAggCall)
        RexCall newCall = (RexCall) call.accept(shuttle);
        if (newCall == call && newOrdinal == call.ordinal) {
            return call;
        }
        return new RexWinAggCall((SqlAggFunction) call.getOperator(), call.getType(), newCall.getOperands(), // remap partition ordinal
        newOrdinal, call.distinct);
    }

    /**
     * Compute the reference counts of fields in the inputs from the new join condition.
     *
     * @param inputs          inputs into the new MultiJoin
     * @param totalFieldCount total number of fields in the MultiJoin
     * @param joinCondition   the new join condition
     * @return map from each input's index to the reference counts of its fields in the join condition
     */
    private static ImmutableMap<Integer, ImmutableIntList> computeJoinFieldRefCounts(final List<RelNode> inputs, final int totalFieldCount, final RexNode joinCondition) {
        // count the input references in the join condition
        final int[] joinCondRefCounts = new int[totalFieldCount];
        joinCondition.accept(new InputReferenceCounter(joinCondRefCounts));
        final Map<Integer, int[]> refCountsMap = Maps.newHashMap();
        final int numInputs = inputs.size();
        int currInput = 0;
        for (final RelNode input : inputs) {
            refCountsMap.put(currInput++, new int[input.getRowType().getFieldCount()]);
        }
        // add on to the counts for each input into the MultiJoin the
        // reference counts computed for the current join condition
        currInput = -1;
        int startField = 0;
        int inputFieldCount = 0;
        for (int i = 0; i < totalFieldCount; i++) {
            if (joinCondRefCounts[i] == 0) {
                continue;
            }
            while (i >= (startField + inputFieldCount)) {
                startField += inputFieldCount;
                currInput++;
                assert currInput < numInputs;
                inputFieldCount = inputs.get(currInput).getRowType().getFieldCount();
            }
            final int[] refCounts = refCountsMap.get(currInput);
            refCounts[i - startField] += joinCondRefCounts[i];
        }
        final ImmutableMap.Builder<Integer, ImmutableIntList> builder = ImmutableMap.builder();
        for (final Map.Entry<Integer, int[]> entry : refCountsMap.entrySet()) {
            builder.put(entry.getKey(), ImmutableIntList.of(entry.getValue()));
        }
        return builder.build();
    }

    /**
     * Visitor that keeps a reference count of the inputs used by an expression.
     * <p>
     * Duplicates {@link org.apache.calcite.rel.rules.JoinToMultiJoinRule.InputReferenceCounter}.
     */
    private static class InputReferenceCounter extends RexVisitorImpl<Void> {

        private final int[] refCounts;

        InputReferenceCounter(int[] refCounts) {
            super(true);
            this.refCounts = refCounts;
        }

        @Override
        public Void visitInputRef(RexInputRef inputRef) {
            refCounts[inputRef.getIndex()]++;
            return null;
        }
    }
}
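
For orientation, here is a minimal self-contained sketch (not taken from either project above) of the core step the trimmer keeps repeating: push a child rel onto the RelBuilder and project only the fields that are still referenced. The helper name, the VALUES input, and the chosen bit set are illustrative assumptions; the RelBuilder calls themselves are stock Calcite API.

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.util.ImmutableBitSet;
import java.util.ArrayList;
import java.util.List;

public class FieldTrimSketch {

    // Keep only the columns listed in fieldsUsed (in bit order) on top of the given input.
    static RelNode projectUsedFields(RelBuilder builder, RelNode input, ImmutableBitSet fieldsUsed) {
        builder.push(input);
        final List<RexNode> kept = new ArrayList<>();
        for (int i : fieldsUsed) {
            kept.add(builder.field(i));
        }
        return builder.project(kept).build();
    }

    public static void main(String[] args) {
        final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
        final FrameworkConfig config = Frameworks.newConfigBuilder().defaultSchema(rootSchema).build();
        final RelBuilder builder = RelBuilder.create(config);
        // a three-column VALUES rel standing in for an arbitrary input
        final RelNode input = builder.values(new String[] { "a", "b", "c" }, 1, 2, 3).build();
        final RelNode trimmed = projectUsedFields(builder, input, ImmutableBitSet.of(0, 2));
        System.out.println(RelOptUtil.toString(trimmed));
    }
}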

19 Source : DremioFieldTrimmer.java
with Apache License 2.0
from dremio

public static DremioFieldTrimmer of(RelBuilder builder) {
    return new DremioFieldTrimmer(builder, false);
}
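
A hedged usage sketch, not from the Dremio sources: since the trimmer above overrides RelFieldTrimmer#trimFields variants, it presumably inherits Calcite's public RelFieldTrimmer#trim(RelNode) entry point, so driving it would look roughly like the fragment below. The names frameworkConfig and rootRel are assumed to be supplied by the surrounding planner.

    // illustrative only; frameworkConfig and rootRel are assumed to exist in the caller
    RelBuilder relBuilder = RelBuilder.create(frameworkConfig);
    DremioFieldTrimmer trimmer = DremioFieldTrimmer.of(relBuilder);
    RelNode trimmedPlan = trimmer.trim(rootRel);   // RelFieldTrimmer#trim walks the tree and dispatches to the trimFields overloads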

19 Source : DremioRelDecorrelator.java
with Apache License 2.0
from dremio

public static RelNode decorrelateQuery(RelNode rootRel, RelBuilder relBuilder, boolean isRelPlanning) {
    final RelNode decorrelatedWithValueGenerator = decorrelateQuery(rootRel, relBuilder, true, isRelPlanning);
    if (correlateCount(decorrelatedWithValueGenerator) != 0) {
        return decorrelateQuery(rootRel, relBuilder, false, isRelPlanning);
    }
    return decorrelatedWithValueGenerator;
}
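
A hedged call-site sketch, not from the Dremio sources: the method above first decorrelates with value generation enabled and, if any Correlate nodes survive, retries without it. A caller would typically invoke it once on the logical plan; rootRel and relBuilder are assumed to be provided by the planner.

    // illustrative only
    RelNode decorrelated = DremioRelDecorrelator.decorrelateQuery(rootRel, relBuilder, /* isRelPlanning */ false);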

18 Source : HBTQueryConvertor.java
with GNU General Public License v3.0
from MyCATApache

// import io.mycat.calcite.rules.PushDownLogicTableRule;
/**
 * @author jamie12221
 */
public class HBTQueryConvertor {

    final static Logger log = LoggerFactory.getLogger(HBTQueryConvertor.class);

    private final List<Object> params;

    private final RelBuilder relBuilder;

    private final Map<String, RexCorrelVariable> correlVariableMap = new HashMap<>();

    private int joinCount;

    private int paramIndex = 0;

    private final MetaDataFetcher metaDataFetcher;

    public HBTQueryConvertor(List<Object> params, RelBuilder relBuilder) {
        this.params = params;
        this.relBuilder = relBuilder;
        this.relBuilder.clear();
        metaDataFetcher = (targetName, sql) -> {
            try {
                JdbcConnectionManager jdbcConnectionManager = MetaClusterCurrent.wrapper(JdbcConnectionManager.class);
                try (DefaultConnection mycatConnection = jdbcConnectionManager.getConnection(targetName)) {
                    Connection rawConnection = mycatConnection.getRawConnection();
                    try (Statement statement = rawConnection.createStatement()) {
                        statement.setMaxRows(0);
                        try (ResultSet resultSet = statement.executeQuery(sql)) {
                            ResultSetMetaData metaData = resultSet.getMetaData();
                            JdbcRowMetaData jdbcRowMetaData = new JdbcRowMetaData(metaData);
                            return FieldTypes.getFieldTypes(jdbcRowMetaData);
                        }
                    } catch (SQLException e) {
                        log.warn("{}", e);
                    }
                    return null;
                }
            } catch (Throwable e) {
                log.warn("{0}", e);
            }
            return null;
        };
    }

    public interface MetaDataFetcher {

        List<FieldType> query(String targetName, String sql);
    }

    private SqlOperator op(String op) {
        SqlOperator sqlOperator = HBTCalciteSupport.INSTANCE.getSqlOperator(op);
        if (sqlOperator == null) {
            throw new AssertionError("unknown: " + op);
        }
        return sqlOperator;
    }

    private List<RelNode> handle(List<Schema> inputs) {
        return inputs.stream().map(this::handle).collect(Collectors.toList());
    }

    public RelNode complie(Schema root) {
        return handle(root);
    }

    public RelNode handle(Schema input) {
        relBuilder.clear();
        try {
            switch(input.getOp()) {
                case FROM_TABLE:
                    return fromTable((FromTableSchema) input);
                case FROM_SQL:
                    return fromSql((FromSqlSchema) input);
                case FROM_REL_TO_SQL:
                    {
                        return fromRelToSqlSchema((FromRelToSqlSchema) input);
                    }
                case FILTER_FROM_TABLE:
                    {
                        return filterFromTable((FilterFromTableSchema) input);
                    }
                case MAP:
                    return map((MapSchema) input);
                case FILTER:
                    return filter((FilterSchema) input);
                case LIMIT:
                    return limit((LimitSchema) input);
                case ORDER:
                    return order((OrderSchema) input);
                case GROUP:
                    return group((GroupBySchema) input);
                case TABLE:
                    return values((AnonyTableSchema) input);
                case DISTINCT:
                    return distinct((DistinctSchema) input);
                case UNION_ALL:
                case UNION_DISTINCT:
                case EXCEPT_ALL:
                case EXCEPT_DISTINCT:
                case INTERSECT_DISTINCT:
                case INTERSECT_ALL:
                    return setSchema((SetOpSchema) input);
                case LEFT_JOIN:
                case RIGHT_JOIN:
                case FULL_JOIN:
                case SEMI_JOIN:
                case ANTI_JOIN:
                case INNER_JOIN:
                    return correlateJoin((JoinSchema) input);
                case RENAME:
                    return rename((RenameSchema) input);
                case CORRELATE_INNER_JOIN:
                case CORRELATE_LEFT_JOIN:
                    return correlate((CorrelateSchema) input);
                case MODIFY_FROM_SQL:
                    return modifyFromSql((ModifyFromSql) input);
                default:
            }
        } finally {
            relBuilder.clear();
        }
        throw new UnsupportedOperationException(input.getOp().getFun());
    }

    private RelNode modifyFromSql(ModifyFromSql input) {
        throw new UnsupportedOperationException();
    }

    public RelNode filterFromTable(FilterFromTableSchema input) {
        List<String> names = input.getNames();
        relBuilder.scan(names);
        TableScan tableScan = (TableScan) relBuilder.peek();
        RelOptTable table = tableScan.getTable();
        relBuilder.as(names.get(names.size() - 1));
        relBuilder.filter(toRex(input.getFilter()));
        Filter build = (Filter) relBuilder.build();
        relBuilder.clear();
        MycatLogicTable mycatTable = table.unwrap(MycatLogicTable.class);
        Distribution distribution = mycatTable.createDistribution();
        Iterable<DataNode> dataNodes = distribution.getDataNodes().flatMap(i -> i.values().stream()).collect(Collectors.toList());
        return build.copy(build.getTraitSet(), ImmutableList.of(toPhyTable(mycatTable, dataNodes)));
    }

    private RelNode fromRelToSqlSchema(FromRelToSqlSchema input) {
        Schema rel = input.getRel();
        RelNode handle = handle(rel);
        return makeTransientSQLScan(input.getTargetName(), handle, false);
    }

    @SneakyThrows
    public RelNode fromSql(FromSqlSchema input) {
        String targetName = input.getTargetName();
        String sql = input.getSql();
        List<FieldType> fieldTypes = input.getFieldTypes();
        RelDataType relDataType = null;
        if (fieldTypes == null || fieldTypes.isEmpty()) {
            relDataType = tryGetRelDataTypeByParse(sql);
            if (relDataType == null) {
                List<FieldType> fieldTypeList = metaDataFetcher.query(targetName, sql);
                if (fieldTypeList != null) {
                    relDataType = toType(fieldTypeList);
                }
            }
        } else {
            relDataType = toType(fieldTypes);
        }
        Objects.requireNonNull(relDataType, "unable to infer the SQL result set type");
        return makeBySql(relDataType, targetName, sql);
    }

    private RelDataType tryGetRelDataTypeByParse(String sql) {
        try {
            SQLStatement sqlStatement = SQLUtils.parseSingleMysqlStatement(sql);
            MetadataManager metadataManager = MetaClusterCurrent.wrapper(MetadataManager.class);
            metadataManager.resolveMetadata(sqlStatement);
            if (sqlStatement instanceof SQLSelectStatement) {
                SQLSelectQueryBlock firstQueryBlock = ((SQLSelectStatement) sqlStatement).getSelect().getFirstQueryBlock();
                final RelDataTypeFactory typeFactory = MycatCalciteSupport.INSTANCE.TypeFactory;
                final RelDataTypeFactory.Builder builder = typeFactory.builder();
                for (SQLSelectItem sqlSelectItem : firstQueryBlock.getSelectList()) {
                    SQLDataType sqlDataType = sqlSelectItem.computeDataType();
                    if (sqlDataType == null) {
                        return null;
                    }
                    SqlTypeName type = HBTCalciteSupport.INSTANCE.getSqlTypeByJdbcValue(sqlDataType.jdbcType());
                    if (type == null) {
                        return null;
                    }
                    builder.add(sqlSelectItem.toString(), type);
                }
                return builder.build();
            }
        } catch (Throwable e) {
            log.warn("", e);
        }
        return null;
    }

    private RelNode correlate(CorrelateSchema input) {
        RelNode left = handle(input.getLeft());
        Holder<RexCorrelVariable> of = Holder.of(null);
        relBuilder.push(left);
        relBuilder.variable(of);
        correlVariableMap.put(input.getRefName(), of.get());
        RelNode right = handle(input.getRight());
        relBuilder.clear();
        final CorrelationId id = of.get().id;
        final ImmutableBitSet requiredColumns = RelOptUtil.correlationColumns(id, right);
        relBuilder.push(left);
        List<RexInputRef> collect = requiredColumns.asList().stream().map(i -> relBuilder.field(i)).collect(Collectors.toList());
        relBuilder.clear();
        relBuilder.push(left);
        relBuilder.push(right);
        return relBuilder.correlate(joinOp(input.getOp()), of.get().id, collect).build();
    }
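
    // Illustrative note (assumption, not part of the original source): the Holder/variable/correlate
    // sequence above mirrors the stock Calcite RelBuilder idiom for building a correlated join, e.g.
    //
    //   final Holder<RexCorrelVariable> v = Holder.of(null);
    //   relBuilder.scan("EMP")                                        // table/column names are illustrative
    //             .variable(v)                                        // capture a correlation variable over the left input
    //             .scan("DEPT")
    //             .filter(relBuilder.equals(relBuilder.field(0),
    //                     relBuilder.field(v.get(), "DEPTNO")))       // reference the left side through the variable
    //             .correlate(JoinRelType.INNER, v.get().id,
    //                     relBuilder.field(2, 0, "DEPTNO"))           // required column from the left input
    //             .build();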

    private RelNode rename(RenameSchema input) {
        RelNode origin = handle(input.getSchema());
        List<String> fieldNames = new ArrayList<>(origin.getRowType().getFieldNames());
        List<String> alias = input.getAlias();
        int size = alias.size();
        for (int i = 0; i < size; i++) {
            fieldNames.set(i, alias.get(i));
        }
        if (alias.isEmpty()) {
            return origin;
        } else {
            relBuilder.clear();
            relBuilder.push(origin);
            relBuilder.rename(fieldNames);
            return relBuilder.build();
        }
    }

    private RelNode correlateJoin(JoinSchema input) {
        List<Schema> schemas = input.getSchemas();
        joinCount = schemas.size();
        try {
            ArrayList<RelNode> nodes = new ArrayList<>(schemas.size());
            HashSet<String> set = new HashSet<>();
            for (Schema schema : schemas) {
                HBTQueryConvertor queryOp = new HBTQueryConvertor(params, relBuilder);
                RelNode relNode = queryOp.complie(schema);
                List<String> fieldNames = relNode.getRowType().getFieldNames();
                if (!set.addAll(fieldNames)) {
                    log.warn("dup fieldNames:" + fieldNames);
                }
                nodes.add(relNode);
            }
            for (RelNode relNode : nodes) {
                relBuilder.push(relNode);
            }
            if (input.getCondition() != null) {
                RexNode rexNode = null;
                try {
                    rexNode = toRex(input.getCondition());
                } catch (Exception e) {
                    e.printStackTrace();
                }
                Set<CorrelationId> collect = correlVariableMap.values().stream().filter(i -> i instanceof RexCorrelVariable).map(i -> i.id).collect(Collectors.toSet());
                return relBuilder.join(joinOp(input.getOp()), rexNode, collect).build();
            } else {
                return relBuilder.join(joinOp(input.getOp())).build();
            }
        } finally {
            joinCount = 0;
        }
    }

    private JoinRelType joinOp(HBTOp op) {
        switch(op) {
            case INNER_JOIN:
                return JoinRelType.INNER;
            case LEFT_JOIN:
                return JoinRelType.LEFT;
            case RIGHT_JOIN:
                return JoinRelType.RIGHT;
            case FULL_JOIN:
                return JoinRelType.FULL;
            case SEMI_JOIN:
                return JoinRelType.SEMI;
            case ANTI_JOIN:
                return JoinRelType.ANTI;
            case CORRELATE_INNER_JOIN:
                return JoinRelType.INNER;
            case CORRELATE_LEFT_JOIN:
                return JoinRelType.LEFT;
            default:
                throw new UnsupportedOperationException();
        }
    }

    private RelNode setSchema(SetOpSchema input) {
        int size = input.getSchemas().size();
        if (size > 2) {
            throw new UnsupportedOperationException("set op size must equals 2");
        }
        List<RelNode> nodeList = handle(input.getSchemas());
        if (nodeList.isEmpty()) {
            throw new IllegalArgumentException();
        }
        RelBuilder relBuilder = this.relBuilder.pushAll(nodeList);
        switch(input.getOp()) {
            case UNION_DISTINCT:
                return relBuilder.union(false, size).build();
            case UNION_ALL:
                return relBuilder.union(true, size).build();
            case EXCEPT_DISTINCT:
                return relBuilder.minus(false, size).build();
            case EXCEPT_ALL:
                return relBuilder.minus(true, size).build();
            case INTERSECT_DISTINCT:
                return relBuilder.intersect(false, size).build();
            case INTERSECT_ALL:
                return relBuilder.intersect(true, size).build();
            default:
                throw new UnsupportedOperationException();
        }
    }
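
    // Illustrative note (assumption, not part of the original source): RelBuilder set operations
    // consume the top N relations on the builder stack, so the pushAll(...) call above is
    // equivalent to pushing each input explicitly, e.g.
    //
    //   relBuilder.push(leftRel)
    //             .push(rightRel)
    //             .union(true, 2)      // UNION ALL over the two pushed inputs
    //             .build();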

    private RelNode group(GroupBySchema input) {
        relBuilder.push(handle(input.getSchema()));
        RelBuilder.GroupKey groupKey = groupItemListToRex(input.getKeys());
        return relBuilder.aggregate(groupKey, toAggregateCall(input.getExprs())).build();
    }

    private RelBuilder.GroupKey groupItemListToRex(List<GroupKey> keys) {
        ImmutableList.Builder<ImmutableList<RexNode>> builder = builder();
        for (GroupKey key : keys) {
            List<RexNode> nodes = toRex(key.getExprs());
            builder.add(ImmutableList.copyOf(nodes));
        }
        ImmutableList<ImmutableList<RexNode>> build = builder.build();
        if (build.size() == 1) {
            return relBuilder.groupKey(build.get(0));
        } else {
            return relBuilder.groupKey(build.stream().flatMap(u -> u.stream()).collect(Collectors.toList()), build);
        }
    }
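
    // Illustrative note (assumption, not part of the original source): a single grouping-item list
    // produces a plain GROUP BY, while several lists produce grouping sets. The plain case is the
    // usual RelBuilder idiom, e.g.
    //
    //   relBuilder.scan("EMP")                                        // table/column names are illustrative
    //             .aggregate(relBuilder.groupKey("DEPTNO"),
    //                        relBuilder.count(false, "C"),
    //                        relBuilder.sum(false, "S", relBuilder.field("SAL")))
    //             .build();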

    private List<RelBuilder.AggCall> toAggregateCall(List<AggregateCall> exprs) {
        return exprs.stream().map(this::toAggregateCall).collect(Collectors.toList());
    }

    private RelBuilder.AggCall toAggregateCall(AggregateCall expr) {
        return relBuilder.aggregateCall(toSqlAggFunction(expr.getFunction()), toRex(expr.getOperands() == null ? Collections.emptyList() : expr.getOperands())).sort(expr.getOrderKeys() == null ? Collections.emptyList() : toSortRex(expr.getOrderKeys())).distinct(Boolean.TRUE.equals(expr.getDistinct())).approximate(Boolean.TRUE.equals(expr.getApproximate())).ignoreNulls(Boolean.TRUE.equals(expr.getIgnoreNulls())).filter(expr.getFilter() == null ? null : toRex(expr.getFilter()));
    }

    private SqlAggFunction toSqlAggFunction(String op) {
        SqlAggFunction sqlAggFunction = HBTCalciteSupport.INSTANCE.getAggFunction(op);
        if (sqlAggFunction == null) {
            throw new UnsupportedOperationException();
        }
        return sqlAggFunction;
    }

    private RelNode fromTable(FromTableSchema input) {
        List<String> collect = new ArrayList<>(input.getNames());
        RelNode build = relBuilder.scan(collect).as(collect.get(collect.size() - 1)).build();
        MycatLogicTable mycatLogicTable = build.getTable().unwrap(MycatLogicTable.clreplaced);
        // eliminate the logical table and replace it with physical tables
        if (mycatLogicTable != null) {
            relBuilder.clear();
            Stream<Map<String, DataNode>> dataNodes = mycatLogicTable.createDistribution().getDataNodes();
            return toPhyTable(mycatLogicTable, dataNodes.flatMap(i -> i.values().stream()).collect(Collectors.toList()));
        }
        return build;
    }

    private RelNode toPhyTable(MycatLogicTable unwrap, Iterable<DataNode> dataNodes) {
        int count = 0;
        for (DataNode dataNode : dataNodes) {
            MycatPhysicalTable mycatPhysicalTable = new MycatPhysicalTable(unwrap, dataNode);
            LogicalTableScan tableScan = LogicalTableScan.create(relBuilder.getCluster(), RelOptTableImpl.create(relBuilder.getRelOptSchema(), unwrap.getRowType(), mycatPhysicalTable, ImmutableList.of(dataNode.getTargetName(), dataNode.getSchema(), dataNode.getTable())), ImmutableList.of());
            count++;
            relBuilder.push(tableScan);
        }
        return relBuilder.union(true, count).build();
    }

    private RelNode map(MapSchema input) {
        RelNode handle = handle(input.getSchema());
        relBuilder.push(handle);
        List<RexNode> nodes = toRex(input.getExpr());
        relBuilder.push(handle);
        relBuilder.project(nodes);
        return relBuilder.build();
    }

    private RelNode filter(FilterSchema input) {
        relBuilder.push(handle(input.getSchema()));
        RexNode rexNode = toRex(input.getExpr());
        if (correlVariableMap.isEmpty()) {
            relBuilder.filter(rexNode);
        } else {
            relBuilder.filter(correlVariableMap.values().stream().map(i -> i.id).collect(Collectors.toList()), rexNode);
        }
        return relBuilder.build();
    }

    private RelNode values(AnonyTableSchema input) {
        return relBuilder.values(toType(input.getFieldNames()), input.getValues().toArray()).build();
    }

    private RelNode distinct(DistinctSchema input) {
        RelNode handle = handle(input.getSchema());
        relBuilder.push(handle);
        relBuilder.distinct();
        return relBuilder.peek();
    }

    private RelNode order(OrderSchema input) {
        return relBuilder.push(handle(input.getSchema())).sort(toSortRex(input.getOrders())).build();
    }

    private List<RexNode> toSortRex(List<OrderItem> orders) {
        final List<RexNode> nodes = new ArrayList<>();
        for (OrderItem field : orders) {
            toSortRex(nodes, field);
        }
        return nodes;
    }

    private RelNode limit(LimitSchema input) {
        relBuilder.push(handle(input.getSchema()));
        Number offset = (Number) input.getOffset();
        Number limit = (Number) input.getLimit();
        relBuilder.limit(offset.intValue(), limit.intValue());
        return relBuilder.build();
    }
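
    // Illustrative note (assumption, not part of the original source): RelBuilder#limit(offset, fetch)
    // produces an offset/fetch-only Sort; combine it with sort(...) for ORDER BY ... LIMIT, e.g.
    //
    //   relBuilder.scan("EMP")                                        // table/column names are illustrative
    //             .sort(relBuilder.desc(relBuilder.field("SAL")))
    //             .limit(0, 10)                                       // offset 0, fetch 10
    //             .build();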

    private void toSortRex(List<RexNode> nodes, OrderItem pair) {
        if (pair.getColumnName().equalsIgnoreCase("*")) {
            for (RexNode node : relBuilder.fields()) {
                if (pair.getDirection() == Direction.DESC) {
                    node = relBuilder.desc(node);
                }
                nodes.add(node);
            }
        } else {
            RexNode node = toRex(new Identifier(pair.getColumnName()));
            if (pair.getDirection() == Direction.DESC) {
                node = relBuilder.desc(node);
            }
            nodes.add(node);
        }
    }

    private RexNode toRex(Expr node) {
        HBTOp op = node.getOp();
        switch(op) {
            case IDENTIFIER:
                {
                    String value = ((Identifier) node).getValue();
                    if (value.startsWith("$") && Character.isDigit(value.charAt(value.length() - 1))) {
                        // reference by ordinal index
                        String substring = value.substring(1);
                        if (joinCount > 1) {
                            if (substring.startsWith("$")) {
                                ImmutableList<RexNode> fields = relBuilder.fields();
                                return relBuilder.field(2, 1, Integer.parseInt(substring.substring(1)));
                            }
                            return relBuilder.field(2, 0, Integer.parseInt(substring));
                        }
                        return relBuilder.field(Integer.parseInt(substring));
                    }
                    if (joinCount > 1) {
                        try {
                            // look up the field by data source
                            for (int i = 0; i < joinCount; i++) {
                                List<String> fieldNames = relBuilder.peek(i).getRowType().getFieldNames();
                                int indexOf = fieldNames.indexOf(value);
                                if (indexOf > -1) {
                                    try {
                                        return relBuilder.field(joinCount, i, indexOf);
                                    } catch (Exception e) {
                                        log.warn("may be a bug");
                                        log.error("", e);
                                    }
                                }
                            }
                        } catch (Exception e) {
                            log.warn("may be a bug");
                            log.error("", e);
                        }
                        try {
                            char c = value.charAt(value.length() - 1);
                            // look up by the join naming convention
                            if (c == '0') {
                                return relBuilder.field(2, 1, value);
                            }
                        } catch (Exception e) {
                            log.warn("may be a bug");
                            log.error("", e);
                        }
                        return relBuilder.field(value);
                    } else {
                        return relBuilder.field(value);
                    }
                }
            case LITERAL:
                {
                    Literal node1 = (Literal) node;
                    return relBuilder.literal(node1.getValue());
                }
            default:
                {
                    if (node.op == AS_COLUMN_NAME) {
                        return as(node);
                    } else if (node.op == REF) {
                        return ref(node);
                    } else if (node.op == CAST) {
                        RexNode rexNode = toRex(node.getNodes().get(0));
                        Identifier type = (Identifier) node.getNodes().get(1);
                        return relBuilder.cast(rexNode, toType(type.getValue()).getSqlTypeName());
                    } else if (node.op == PARAM) {
                        return relBuilder.literal(params.get(paramIndex++));
                    } else if (node.op == FUN) {
                        Fun node2 = (Fun) node;
                        if ("as".equals(node2.getFunctionName())) {
                            return as(node);
                        }
                        if ("ref".equals(node2.getFunctionName())) {
                            return ref(node);
                        }
                        return this.relBuilder.call(op(node2.getFunctionName()), toRex(node.getNodes()));
                    } else {
                        throw new UnsupportedOperationException();
                    }
                }
        }
    // throw new UnsupportedOperationException();
    }

    private RexNode ref(Expr node) {
        String tableName = ((Identifier) node.getNodes().get(0)).getValue();
        String fieldName = ((Identifier) node.getNodes().get(1)).getValue();
        RexCorrelVariable relNode = correlVariableMap.getOrDefault(tableName, null);
        return relBuilder.field(relNode, fieldName);
    }

    private RexNode as(Expr node) {
        Identifier id = (Identifier) node.getNodes().get(1);
        return this.relBuilder.alias(toRex(node.getNodes().get(0)), id.getValue());
    }

    private List<RexNode> toRex(List<Expr> operands) {
        final ImmutableList.Builder<RexNode> builder = builder();
        for (Expr operand : operands) {
            builder.add(toRex(operand));
        }
        return builder.build();
    }

    private RelDataType toType(String typeText) {
        final RelDataTypeFactory typeFactory = relBuilder.getTypeFactory();
        return typeFactory.createSqlType(HBTCalciteSupport.INSTANCE.getSqlTypeName(typeText));
    }

    public static RelDataType toType(String typeText, boolean nullable, Integer precision, Integer scale) {
        final RelDataTypeFactory typeFactory = MycatCalciteSupport.INSTANCE.TypeFactory;
        SqlTypeName sqlTypeName = HBTCalciteSupport.INSTANCE.getSqlTypeName(typeText);
        RelDataType sqlType = null;
        if (precision != null && scale != null) {
            if (sqlTypeName.allowsPrec() && sqlTypeName.allowsScale()) {
                sqlType = typeFactory.createSqlType(sqlTypeName, precision, scale);
            } else if (sqlTypeName.allowsPrec() && !sqlTypeName.allowsScale()) {
                sqlType = typeFactory.createSqlType(sqlTypeName, precision);
            } else if (!sqlTypeName.allowsPrec() && !sqlTypeName.allowsScale()) {
                sqlType = typeFactory.createSqlType(sqlTypeName);
            } else {
                throw new IllegalArgumentException("sqlTypeName:" + sqlTypeName + " precision:" + precision + " scale:" + scale);
            }
        }
        if (precision != null && scale == null) {
            if (sqlTypeName.allowsPrec()) {
                sqlType = typeFactory.createSqlType(sqlTypeName, precision);
            } else {
                sqlType = typeFactory.createSqlType(sqlTypeName);
            }
        }
        if (precision == null && scale == null) {
            sqlType = typeFactory.createSqlType(sqlTypeName);
        }
        if (sqlType == null) {
            throw new IllegalArgumentException("sqlTypeName:" + sqlTypeName);
        }
        return typeFactory.createTypeWithNullability(sqlType, nullable);
    }

    public static RelDataType toType(List<FieldType> fieldSchemaList) {
        final RelDataTypeFactory typeFactory = MycatCalciteSupport.INSTANCE.TypeFactory;
        final RelDataTypeFactory.Builder builder = typeFactory.builder();
        for (FieldType fieldSchema : fieldSchemaList) {
            boolean nullable = fieldSchema.isNullable();
            Integer precision = fieldSchema.getPrecision();
            Integer scale = fieldSchema.getScale();
            builder.add(fieldSchema.getColumnName(), toType(fieldSchema.getColumnType(), nullable, precision, scale));
        }
        return builder.build();
    }

    public RelNode makeTransientSQLScan(String targetName, RelNode input, boolean forUpdate) {
        RelDataType rowType = input.getRowType();
        SqlDialect sqlDialect = MycatCalciteSupport.INSTANCE.getSqlDialectByTargetName(targetName);
        return makeBySql(rowType, targetName, MycatCalciteSupport.INSTANCE.convertToSql(input, sqlDialect, forUpdate).getSql());
    }

    /**
     * Creates a literal (constant expression).
     */
    public static RexNode literal(RelDataType type, Object value, boolean allowCast) {
        final RexBuilder rexBuilder = MycatCalciteSupport.INSTANCE.RexBuilder;
        RelDataTypeFactory typeFactory = MycatCalciteSupport.INSTANCE.TypeFactory;
        RexNode literal;
        if (value == null) {
            literal = rexBuilder.makeNullLiteral(typeFactory.createSqlType(SqlTypeName.NULL));
        } else if (value instanceof Boolean) {
            literal = rexBuilder.makeLiteral((Boolean) value);
        } else if (value instanceof BigDecimal) {
            literal = rexBuilder.makeExactLiteral((BigDecimal) value);
        } else if (value instanceof Float || value instanceof Double) {
            literal = rexBuilder.makeApproxLiteral(BigDecimal.valueOf(((Number) value).doubleValue()));
        } else if (value instanceof Number) {
            literal = rexBuilder.makeExactLiteral(BigDecimal.valueOf(((Number) value).longValue()));
        } else if (value instanceof String) {
            literal = rexBuilder.makeLiteral((String) value);
        } else if (value instanceof Enum) {
            literal = rexBuilder.makeLiteral(value, typeFactory.createSqlType(SqlTypeName.SYMBOL), false);
        } else if (value instanceof byte[]) {
            literal = rexBuilder.makeBinaryLiteral(new ByteString((byte[]) value));
        } else if (value instanceof LocalDate) {
            LocalDate value1 = (LocalDate) value;
            DateString dateString = new DateString(value1.getYear(), value1.getMonthValue(), value1.getDayOfMonth());
            literal = rexBuilder.makeDateLiteral(dateString);
        } else if (value instanceof LocalTime) {
            LocalTime value1 = (LocalTime) value;
            TimeString timeString = new TimeString(value1.getHour(), value1.getMinute(), value1.getSecond());
            literal = rexBuilder.makeTimeLiteral(timeString, -1);
        } else if (value instanceof LocalDateTime) {
            LocalDateTime value1 = (LocalDateTime) value;
            TimestampString timeString = new TimestampString(value1.getYear(), value1.getMonthValue(), value1.getDayOfMonth(), value1.getHour(), value1.getMinute(), value1.getSecond());
            timeString = timeString.withNanos(value1.getNano());
            literal = rexBuilder.makeTimestampLiteral(timeString, -1);
        } else {
            throw new IllegalArgumentException("cannot convert " + value + " (" + value.getClreplaced() + ") to a constant");
        }
        if (allowCast) {
            return rexBuilder.makeCast(type, literal);
        } else {
            return literal;
        }
    }

    public RelBuilder values(RelDataType rowType, Object... columnValues) {
        int columnCount = rowType.getFieldCount();
        final ImmutableList.Builder<ImmutableList<RexLiteral>> listBuilder = ImmutableList.builder();
        final List<RexLiteral> valueList = new ArrayList<>();
        List<RelDataTypeField> fieldList = rowType.getFieldList();
        for (int i = 0; i < columnValues.length; i++) {
            RelDataTypeField relDataTypeField = fieldList.get(valueList.size());
            valueList.add((RexLiteral) literal(relDataTypeField.getType(), columnValues[i], false));
            if ((i + 1) % columnCount == 0) {
                listBuilder.add(ImmutableList.copyOf(valueList));
                valueList.clear();
            }
        }
        return relBuilder.values(listBuilder.build(), rowType);
    }

    public RexNode literal(Object value) {
        return literal(null, value, false);
    }

    /**
     * todo for update
     *
     * @param targetName
     * @param relDataType
     * @param sql
     * @return
     */
    public MycatTransientSQLTableScan makeBySql(RelDataType relDataType, String targetName, String sql) {
        return new MycatTransientSQLTableScan(relBuilder.getCluster(), relDataType, targetName, sql);
    }
}
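
The literal(...) and values(...) helpers above mirror functionality that stock RelBuilder already offers. Below is a minimal, self-contained sketch (not taken from the project above) that builds a comparable VALUES node with the core Calcite API; the field names "id" and "name" are chosen purely for illustration.

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class ValuesSketch {
    public static void main(String[] args) {
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true))
                .build();
        RelBuilder relBuilder = RelBuilder.create(config);
        // Two rows of (id, name); column types are inferred from the literals.
        RelNode values = relBuilder
                .values(new String[] {"id", "name"}, 1, "a", 2, "b")
                .build();
        System.out.println(RelOptUtil.toString(values));
    }
}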

18 Source : HBTQueryConvertor.java
with GNU General Public License v3.0
from MyCATApache

private RelNode setSchema(SetOpSchema input) {
    int size = input.getSchemas().size();
    if (size > 2) {
        throw new UnsupportedOperationException("set op size must equals 2");
    }
    List<RelNode> nodeList = handle(input.getSchemas());
    if (nodeList.isEmpty()) {
        throw new IllegalArgumentException();
    }
    RelBuilder relBuilder = this.relBuilder.pushAll(nodeList);
    switch(input.getOp()) {
        case UNION_DISTINCT:
            return relBuilder.union(false, size).build();
        case UNION_ALL:
            return relBuilder.union(true, size).build();
        case EXCEPT_DISTINCT:
            return relBuilder.minus(false, size).build();
        case EXCEPT_ALL:
            return relBuilder.minus(true, size).build();
        case INTERSECT_DISTINCT:
            return relBuilder.intersect(false, size).build();
        case INTERSECT_ALL:
            return relBuilder.intersect(true, size).build();
        default:
            throw new UnsupportedOperationException();
    }
}
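
A minimal sketch of the pushAll/union pattern used by setSchema, again not from the project itself: two VALUES inputs stand in for the translated schemas, and union(false, n) produces a UNION DISTINCT over everything pushed onto the builder's stack.

import java.util.Arrays;
import java.util.List;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class SetOpSketch {
    public static void main(String[] args) {
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true))
                .build();
        RelBuilder relBuilder = RelBuilder.create(config);
        RelNode left = relBuilder.values(new String[] {"id"}, 1, 2).build();
        RelNode right = relBuilder.values(new String[] {"id"}, 2, 3).build();
        List<RelNode> inputs = Arrays.asList(left, right);
        // pushAll places every input on the stack; the set operation then
        // consumes exactly inputs.size() frames.
        RelNode union = relBuilder.pushAll(inputs)
                .union(false, inputs.size())
                .build();
        System.out.println(RelOptUtil.toString(union));
    }
}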

18 Source : RelStructuredTypeFlattener.java
with Apache License 2.0
from lealone

// TODO jvs 10-Feb-2005:  factor out generic rewrite helper, with the
// ability to map between old and new rels and field ordinals.  Also,
// for now need to prohibit queries which return UDT instances.
/**
 * RelStructuredTypeFlattener removes all structured types from a tree of
 * relational expressions. Because it must operate globally on the tree, it is
 * implemented as an explicit self-contained rewrite operation instead of via
 * normal optimizer rules. This approach has the benefit that real optimizer and
 * codegen rules never have to deal with structured types.
 *
 * <p>As an example, suppose we have a structured type <code>ST(A1 smallint, A2
 * bigint)</code>, a table <code>T(c1 ST, c2 double)</code>, and a query <code>
 * select t.c2, t.c1.a2 from t</code>. After SqlToRelConverter executes, the
 * unflattened tree looks like:
 *
 * <blockquote><pre><code>
 * LogicalProject(C2=[$1], A2=[$0.A2])
 *   LogicalTableScan(table=[T])
 * </code></pre></blockquote>
 *
 * <p>After flattening, the resulting tree looks like
 *
 * <blockquote><pre><code>
 * LogicalProject(C2=[$3], A2=[$2])
 *   FtrsIndexScanRel(table=[T], index=[clustered])
 * </code></pre></blockquote>
 *
 * <p>The index scan produces a flattened row type <code>(boolean, smallint,
 * bigint, double)</code> (the boolean is a null indicator for c1), and the
 * projection picks out the desired attributes (omitting <code>$0</code> and
 * <code>$1</code> altogether). After optimization, the projection might be
 * pushed down into the index scan, resulting in a final tree like
 *
 * <blockquote><pre><code>
 * FtrsIndexScanRel(table=[T], index=[clustered], projection=[3, 2])
 * </code></pre></blockquote>
 */
public class RelStructuredTypeFlattener implements ReflectiveVisitor {

    // ~ Instance fields --------------------------------------------------------
    private final RelBuilder relBuilder;

    private final RexBuilder rexBuilder;

    private final boolean restructure;

    private final Map<RelNode, RelNode> oldToNewRelMap = new HashMap<>();

    private RelNode currentRel;

    private int iRestructureInput;

    private RelDataType flattenedRootType;

    boolean restructured;

    private final RelOptTable.ToRelContext toRelContext;

    // ~ Constructors -----------------------------------------------------------
    // to be removed before 2.0
    @Deprecated
    public RelStructuredTypeFlattener(RexBuilder rexBuilder, RelOptTable.ToRelContext toRelContext, boolean restructure) {
        this(RelFactories.LOGICAL_BUILDER.create(toRelContext.getCluster(), null), rexBuilder, toRelContext, restructure);
    }

    public RelStructuredTypeFlattener(RelBuilder relBuilder, RexBuilder rexBuilder, RelOptTable.ToRelContext toRelContext, boolean restructure) {
        this.relBuilder = relBuilder;
        this.rexBuilder = rexBuilder;
        this.toRelContext = toRelContext;
        this.restructure = restructure;
    }

    // ~ Methods ----------------------------------------------------------------
    public void updateRelInMap(SortedSetMultimap<RelNode, CorrelationId> mapRefRelToCorVar) {
        for (RelNode rel : Lists.newArrayList(mapRefRelToCorVar.keySet())) {
            if (oldToNewRelMap.containsKey(rel)) {
                SortedSet<CorrelationId> corVarSet = mapRefRelToCorVar.removeAll(rel);
                mapRefRelToCorVar.putAll(oldToNewRelMap.get(rel), corVarSet);
            }
        }
    }

    public void updateRelInMap(SortedMap<CorrelationId, LogicalCorrelate> mapCorVarToCorRel) {
        for (CorrelationId corVar : mapCorVarToCorRel.keySet()) {
            LogicalCorrelate oldRel = mapCorVarToCorRel.get(corVar);
            if (oldToNewRelMap.containsKey(oldRel)) {
                RelNode newRel = oldToNewRelMap.get(oldRel);
                assert newRel instanceof LogicalCorrelate;
                mapCorVarToCorRel.put(corVar, (LogicalCorrelate) newRel);
            }
        }
    }

    public RelNode rewrite(RelNode root) {
        // Perform flattening.
        final RewriteRelVisitor visitor = new RewriteRelVisitor();
        visitor.visit(root, 0, null);
        RelNode flattened = getNewForOldRel(root);
        flattenedRootType = flattened.getRowType();
        // If requested, add an additional projection which puts
        // everything back into structured form for return to the
        // client.
        restructured = false;
        List<RexNode> structuringExps = null;
        if (restructure) {
            iRestructureInput = 0;
            structuringExps = restructureFields(root.getRowType());
        }
        if (restructured) {
            // REVIEW jvs 23-Mar-2005: How do we make sure that this
            // implementation stays in Java? Fennel can't handle
            // structured types.
            return relBuilder.push(flattened).projectNamed(structuringExps, root.getRowType().getFieldNames(), true).build();
        } else {
            return flattened;
        }
    }

    private List<RexNode> restructureFields(RelDataType structuredType) {
        final List<RexNode> structuringExps = new ArrayList<>();
        for (RelDataTypeField field : structuredType.getFieldList()) {
            // TODO: row
            if (field.getType().getSqlTypeName() == SqlTypeName.STRUCTURED) {
                restructured = true;
                structuringExps.add(restructure(field.getType()));
            } else {
                structuringExps.add(RexBuilder.getRexFactory().makeInputRef(iRestructureInput, field.getType()));
                ++iRestructureInput;
            }
        }
        return structuringExps;
    }

    private RexNode restructure(RelDataType structuredType) {
        // Access null indicator for entire structure.
        RexInputRef nullIndicator = RexInputRef.of(iRestructureInput++, flattenedRootType.getFieldList());
        // Use NEW to put flattened data back together into a structure.
        List<RexNode> inputExprs = restructureFields(structuredType);
        RexNode newInvocation = rexBuilder.makeNewInvocation(structuredType, inputExprs);
        if (!structuredType.isNullable()) {
            // Optimize away the null test.
            return newInvocation;
        }
        // Construct a CASE expression to handle the structure-level null
        // indicator.
        RexNode[] caseOperands = new RexNode[3];
        // WHEN StructuredType.Indicator IS NULL
        caseOperands[0] = rexBuilder.makeCall(SqlStdOperatorTable.IS_NULL, nullIndicator);
        // THEN CAST(NULL AS StructuredType)
        caseOperands[1] = rexBuilder.makeCast(structuredType, rexBuilder.constantNull());
        // ELSE NEW StructuredType(inputs...) END
        caseOperands[2] = newInvocation;
        return rexBuilder.makeCall(SqlStdOperatorTable.CASE, caseOperands);
    }

    protected void setNewForOldRel(RelNode oldRel, RelNode newRel) {
        oldToNewRelMap.put(oldRel, newRel);
    }

    protected RelNode getNewForOldRel(RelNode oldRel) {
        return oldToNewRelMap.get(oldRel);
    }

    /**
     * Maps the ordinal of a field pre-flattening to the ordinal of the
     * corresponding field post-flattening.
     *
     * @param oldOrdinal Pre-flattening ordinal
     * @return Post-flattening ordinal
     */
    protected int getNewForOldInput(int oldOrdinal) {
        return getNewFieldForOldInput(oldOrdinal).i;
    }

    /**
     * Maps the ordinal of a field pre-flattening to the ordinal of the
     * corresponding field post-flattening, and also returns its type.
     *
     * @param oldOrdinal Pre-flattening ordinal
     * @return Post-flattening ordinal and type
     */
    protected Ord<RelDataType> getNewFieldForOldInput(int oldOrdinal) {
        assert currentRel != null;
        int newOrdinal = 0;
        // determine which input rel oldOrdinal references, and adjust
        // oldOrdinal to be relative to that input rel
        RelNode oldInput = null;
        RelNode newInput = null;
        for (RelNode oldInput1 : currentRel.getInputs()) {
            newInput = getNewForOldRel(oldInput1);
            RelDataType oldInputType = oldInput1.getRowType();
            int n = oldInputType.getFieldCount();
            if (oldOrdinal < n) {
                oldInput = oldInput1;
                break;
            }
            newOrdinal += newInput.getRowType().getFieldCount();
            oldOrdinal -= n;
        }
        assert oldInput != null;
        assert newInput != null;
        RelDataType oldInputType = oldInput.getRowType();
        final int newOffset = calculateFlattenedOffset(oldInputType, oldOrdinal);
        newOrdinal += newOffset;
        final RelDataTypeField field = newInput.getRowType().getFieldList().get(newOffset);
        return Ord.of(newOrdinal, field.getType());
    }

    /**
     * Returns a mapping between old and new fields.
     *
     * @param oldRel Old relational expression
     * @return Mapping between fields of old and new
     */
    private Mappings.TargetMapping getNewForOldInputMapping(RelNode oldRel) {
        final RelNode newRel = getNewForOldRel(oldRel);
        return Mappings.target(this::getNewForOldInput, oldRel.getRowType().getFieldCount(), newRel.getRowType().getFieldCount());
    }

    private int calculateFlattenedOffset(RelDataType rowType, int ordinal) {
        int offset = 0;
        if (SqlTypeUtil.needsNullIndicator(rowType)) {
            // skip null indicator
            ++offset;
        }
        List<RelDataTypeField> oldFields = rowType.getFieldList();
        for (int i = 0; i < ordinal; ++i) {
            RelDataType oldFieldType = oldFields.get(i).getType();
            if (oldFieldType.isStruct()) {
                // TODO jvs 10-Feb-2005: this isn't terribly efficient;
                // keep a mapping somewhere
                RelDataType flattened = SqlTypeUtil.flattenRecordType(rexBuilder.getTypeFactory(), oldFieldType, null);
                final List<RelDataTypeField> fields = flattened.getFieldList();
                offset += fields.size();
            } else {
                ++offset;
            }
        }
        return offset;
    }

    public void rewriteRel(LogicalTableModify rel) {
        LogicalTableModify newRel = LogicalTableModify.create(rel.getTable(), rel.getCatalogReader(), getNewForOldRel(rel.getInput()), rel.getOperation(), rel.getUpdateColumnList(), rel.getSourceExpressionList(), true);
        setNewForOldRel(rel, newRel);
    }

    public void rewriteRel(LogicalAggregate rel) {
        RelDataType inputType = rel.getInput().getRowType();
        for (RelDataTypeField field : inputType.getFieldList()) {
            if (field.getType().isStruct()) {
                // TODO jvs 10-Feb-2005
                throw Util.needToImplement("aggregation on structured types");
            }
        }
        rewriteGeneric(rel);
    }

    public void rewriteRel(Sort rel) {
        RelCollation oldCollation = rel.getCollation();
        final RelNode oldChild = rel.getInput();
        final RelNode newChild = getNewForOldRel(oldChild);
        final Mappings.TargetMapping mapping = getNewForOldInputMapping(oldChild);
        // validate
        for (RelFieldCollation field : oldCollation.getFieldCollations()) {
            int oldInput = field.getFieldIndex();
            RelDataType sortFieldType = oldChild.getRowType().getFieldList().get(oldInput).getType();
            if (sortFieldType.isStruct()) {
                // TODO jvs 10-Feb-2005
                throw Util.needToImplement("sorting on structured types");
            }
        }
        RelCollation newCollation = RexUtil.apply(mapping, oldCollation);
        Sort newRel = LogicalSort.create(newChild, newCollation, rel.offset, rel.fetch);
        setNewForOldRel(rel, newRel);
    }

    public void rewriteRel(LogicalFilter rel) {
        RelNode newRel = rel.copy(rel.getTraitSet(), getNewForOldRel(rel.getInput()), rel.getCondition().accept(new RewriteRexShuttle()));
        setNewForOldRel(rel, newRel);
    }

    public void rewriteRel(LogicalJoin rel) {
        LogicalJoin newRel = LogicalJoin.create(getNewForOldRel(rel.getLeft()), getNewForOldRel(rel.getRight()), rel.getCondition().accept(new RewriteRexShuttle()), rel.getVariablesSet(), rel.getJoinType());
        setNewForOldRel(rel, newRel);
    }

    public void rewriteRel(LogicalCorrelate rel) {
        ImmutableBitSet.Builder newPos = ImmutableBitSet.builder();
        for (int pos : rel.getRequiredColumns()) {
            RelDataType corrFieldType = rel.getLeft().getRowType().getFieldList().get(pos).getType();
            if (corrFieldType.isStruct()) {
                throw Util.needToImplement("correlation on structured type");
            }
            newPos.set(getNewForOldInput(pos));
        }
        LogicalCorrelate newRel = LogicalCorrelate.create(getNewForOldRel(rel.getLeft()), getNewForOldRel(rel.getRight()), rel.getCorrelationId(), newPos.build(), rel.getJoinType());
        setNewForOldRel(rel, newRel);
    }

    public void rewriteRel(Collect rel) {
        rewriteGeneric(rel);
    }

    public void rewriteRel(Uncollect rel) {
        rewriteGeneric(rel);
    }

    public void rewriteRel(LogicalIntersect rel) {
        rewriteGeneric(rel);
    }

    public void rewriteRel(LogicalMinus rel) {
        rewriteGeneric(rel);
    }

    public void rewriteRel(LogicalUnion rel) {
        rewriteGeneric(rel);
    }

    public void rewriteRel(LogicalValues rel) {
        // NOTE jvs 30-Apr-2006: UDT instances require invocation
        // of a constructor method, which can't be represented
        // by the tuples stored in a LogicalValues, so we don't have
        // to worry about them here.
        rewriteGeneric(rel);
    }

    public void rewriteRel(LogicalTableFunctionScan rel) {
        rewriteGeneric(rel);
    }

    public void rewriteRel(Sample rel) {
        rewriteGeneric(rel);
    }

    public void rewriteRel(LogicalProject rel) {
        final List<Pair<RexNode, String>> flattenedExpList = new ArrayList<>();
        flattenProjections(new RewriteRexShuttle(), rel.getProjects(), rel.getRowType().getFieldNames(), "", flattenedExpList);
        relBuilder.push(getNewForOldRel(rel.getInput())).projectNamed(Pair.left(flattenedExpList), Pair.right(flattenedExpList), true);
        setNewForOldRel(rel, relBuilder.build());
    }

    public void rewriteRel(LogicalCalc rel) {
        // Translate the child.
        final RelNode newInput = getNewForOldRel(rel.getInput());
        final RelOptCluster cluster = rel.getCluster();
        RexProgramBuilder programBuilder = new RexProgramBuilder(newInput.getRowType(), cluster.getRexBuilder());
        // Convert the common expressions.
        final RexProgram program = rel.getProgram();
        final RewriteRexShuttle shuttle = new RewriteRexShuttle();
        for (RexNode expr : program.getExprList()) {
            programBuilder.registerInput(expr.accept(shuttle));
        }
        // Convert the projections.
        final List<Pair<RexNode, String>> flattenedExpList = new ArrayList<>();
        List<String> fieldNames = rel.getRowType().getFieldNames();
        flattenProjections(new RewriteRexShuttle(), program.getProjectList(), fieldNames, "", flattenedExpList);
        // Register each of the new projections.
        for (Pair<RexNode, String> flattenedExp : flattenedExpList) {
            programBuilder.addProject(flattenedExp.left, flattenedExp.right);
        }
        // Translate the condition.
        final RexLocalRef conditionRef = program.getCondition();
        if (conditionRef != null) {
            final Ord<RelDataType> newField = getNewFieldForOldInput(conditionRef.getIndex());
            programBuilder.addCondition(RexBuilder.getRexFactory().makeInputRef(newField.i, newField.e));
        }
        RexProgram newProgram = programBuilder.getProgram();
        // Create a new calc relational expression.
        LogicalCalc newRel = LogicalCalc.create(newInput, newProgram);
        setNewForOldRel(rel, newRel);
    }

    public void rewriteRel(SelfFlatteningRel rel) {
        rel.flattenRel(this);
    }

    public void rewriteGeneric(RelNode rel) {
        RelNode newRel = rel.copy(rel.getTraitSet(), rel.getInputs());
        List<RelNode> oldInputs = rel.getInputs();
        for (int i = 0; i < oldInputs.size(); ++i) {
            newRel.replaceInput(i, getNewForOldRel(oldInputs.get(i)));
        }
        setNewForOldRel(rel, newRel);
    }

    private void flattenProjections(RewriteRexShuttle shuttle, List<? extends RexNode> exps, List<String> fieldNames, String prefix, List<Pair<RexNode, String>> flattenedExps) {
        for (int i = 0; i < exps.size(); ++i) {
            RexNode exp = exps.get(i);
            String fieldName = (fieldNames == null || fieldNames.get(i) == null) ? ("$" + i) : fieldNames.get(i);
            if (!prefix.equals("")) {
                fieldName = prefix + "$" + fieldName;
            }
            flattenProjection(shuttle, exp, fieldName, flattenedExps);
        }
    }

    private void flattenProjection(RewriteRexShuttle shuttle, RexNode exp, String fieldName, List<Pair<RexNode, String>> flattenedExps) {
        if (exp.getType().isStruct()) {
            if (exp instanceof RexInputRef) {
                RexInputRef inputRef = (RexInputRef) exp;
                // expand to range
                RelDataType flattenedType = SqlTypeUtil.flattenRecordType(rexBuilder.getTypeFactory(), exp.getType(), null);
                List<RelDataTypeField> fieldList = flattenedType.getFieldList();
                int n = fieldList.size();
                for (int j = 0; j < n; ++j) {
                    final Ord<RelDataType> newField = getNewFieldForOldInput(inputRef.getIndex());
                    flattenedExps.add(Pair.of(RexBuilder.getRexFactory().makeInputRef(newField.i + j, newField.e), fieldName));
                }
            } else if (isConstructor(exp) || exp.isA(SqlKind.CAST)) {
                // REVIEW jvs 27-Feb-2005: for cast, see corresponding note
                // in RewriteRexShuttle
                RexCall call = (RexCall) exp;
                if (exp.isA(SqlKind.NEW_SPECIFICATION)) {
                    // For object constructors, prepend a FALSE null
                    // indicator.
                    flattenedExps.add(Pair.of(rexBuilder.makeLiteral(false), fieldName));
                } else if (exp.isA(SqlKind.CAST)) {
                    if (RexLiteral.isNullLiteral(((RexCall) exp).getOperands().get(0))) {
                        // Translate CAST(NULL AS UDT) into
                        // the correct number of null fields.
                        flattenNullLiteral(exp.getType(), flattenedExps);
                        return;
                    }
                }
                flattenProjections(new RewriteRexShuttle(), call.getOperands(), Collections.nCopies(call.getOperands().size(), null), fieldName, flattenedExps);
            } else if (exp instanceof RexCall) {
                // NOTE jvs 10-Feb-2005: This is a lame hack to keep special
                // functions which return row types working.
                int j = 0;
                RexNode newExp = exp;
                List<RexNode> oldOperands = ((RexCall) exp).getOperands();
                if (oldOperands.get(0) instanceof RexInputRef) {
                    final RexInputRef inputRef = (RexInputRef) oldOperands.get(0);
                    final Ord<RelDataType> newField = getNewFieldForOldInput(inputRef.getIndex());
                    newExp = rexBuilder.makeCall(exp.getType(), ((RexCall) exp).getOperator(), ImmutableList.of(rexBuilder.makeInputRef(newField.e, newField.i), oldOperands.get(1)));
                }
                for (RelDataTypeField field : newExp.getType().getFieldList()) {
                    flattenedExps.add(Pair.of(rexBuilder.makeFieldAccess(newExp, field.getIndex()), fieldName + "$" + (j++)));
                }
            } else {
                throw Util.needToImplement(exp);
            }
        } else {
            flattenedExps.add(Pair.of(exp.accept(shuttle), fieldName));
        }
    }

    private void flattenNullLiteral(RelDataType type, List<Pair<RexNode, String>> flattenedExps) {
        RelDataType flattenedType = SqlTypeUtil.flattenRecordType(rexBuilder.getTypeFactory(), type, null);
        for (RelDataTypeField field : flattenedType.getFieldList()) {
            flattenedExps.add(Pair.of(rexBuilder.makeCast(field.getType(), rexBuilder.constantNull()), field.getName()));
        }
    }

    private boolean isConstructor(RexNode rexNode) {
        // TODO jvs 11-Feb-2005: share code with SqlToRelConverter
        if (!(rexNode instanceof RexCall)) {
            return false;
        }
        RexCall call = (RexCall) rexNode;
        return call.getOperator().getName().equalsIgnoreCase("row") || (call.isA(SqlKind.NEW_SPECIFICATION));
    }

    public void rewriteRel(TableScan rel) {
        RelNode newRel = rel.getTable().toRel(toRelContext);
        if (!SqlTypeUtil.isFlat(rel.getRowType())) {
            final List<Pair<RexNode, String>> flattenedExpList = new ArrayList<>();
            flattenInputs(rel.getRowType().getFieldList(), rexBuilder.makeRangeReference(newRel), flattenedExpList);
            newRel = relBuilder.push(newRel).projectNamed(Pair.left(flattenedExpList), Pair.right(flattenedExpList), true).build();
        }
        setNewForOldRel(rel, newRel);
    }

    public void rewriteRel(LogicalSnapshot rel) {
        RelNode newRel = rel.copy(rel.getTraitSet(), getNewForOldRel(rel.getInput()), rel.getPeriod().accept(new RewriteRexShuttle()));
        setNewForOldRel(rel, newRel);
    }

    public void rewriteRel(LogicalDelta rel) {
        rewriteGeneric(rel);
    }

    public void rewriteRel(LogicalChi rel) {
        rewriteGeneric(rel);
    }

    public void rewriteRel(LogicalMatch rel) {
        rewriteGeneric(rel);
    }

    /**
     * Generates expressions that reference the flattened input fields from
     * a given row type.
     */
    private void flattenInputs(List<RelDataTypeField> fieldList, RexNode prefix, List<Pair<RexNode, String>> flattenedExpList) {
        for (RelDataTypeField field : fieldList) {
            final RexNode ref = rexBuilder.makeFieldAccess(prefix, field.getIndex());
            if (field.getType().isStruct()) {
                flattenInputs(field.getType().getFieldList(), ref, flattenedExpList);
            } else {
                flattenedExpList.add(Pair.of(ref, field.getName()));
            }
        }
    }

    // ~ Inner Interfaces -------------------------------------------------------
    /**
     * Mix-in interface for relational expressions that know how to
     * flatten themselves.
     */
    public interface SelfFlatteningRel extends RelNode {

        void flattenRel(RelStructuredTypeFlattener flattener);
    }

    // ~ Inner Classes ----------------------------------------------------------
    /**
     * Visitor that flattens each relational expression in a tree.
     */
    private class RewriteRelVisitor extends RelVisitor {

        private final ReflectiveVisitDispatcher<RelStructuredTypeFlattener, RelNode> dispatcher = ReflectUtil.createDispatcher(RelStructuredTypeFlattener.class, RelNode.class);

        @Override
        public void visit(RelNode p, int ordinal, RelNode parent) {
            // rewrite children first
            super.visit(p, ordinal, parent);
            currentRel = p;
            final String visitMethodName = "rewriteRel";
            boolean found = dispatcher.invokeVisitor(RelStructuredTypeFlattener.this, currentRel, visitMethodName);
            currentRel = null;
            if (!found) {
                if (p.getInputs().size() == 0) {
                    // for leaves, it's usually safe to assume that
                    // no transformation is required
                    rewriteGeneric(p);
                } else {
                    throw new AssertionError("no '" + visitMethodName + "' method found for class " + p.getClass().getName());
                }
            }
        }
    }

    /**
     * Shuttle that rewrites scalar expressions.
     */
    private class RewriteRexShuttle extends RexShuttle {

        @Override
        public RexNode visitInputRef(RexInputRef input) {
            final int oldIndex = input.getIndex();
            final Ord<RelDataType> field = getNewFieldForOldInput(oldIndex);
            // Use the actual flattened type, which may be different from the current
            // type.
            RelDataType fieldType = removeDistinct(field.e);
            return RexBuilder.getRexFactory().makeInputRef(field.i, fieldType);
        }

        private RelDataType removeDistinct(RelDataType type) {
            if (type.getSqlTypeName() != SqlTypeName.DISTINCT) {
                return type;
            }
            return type.getFieldList().get(0).getType();
        }

        @Override
        public RexNode visitFieldAccess(RexFieldAccess fieldAccess) {
            // walk down the field access path expression, calculating
            // the desired input number
            int iInput = 0;
            Deque<Integer> accessOrdinals = new ArrayDeque<>();
            for (; ; ) {
                RexNode refExp = fieldAccess.getReferenceExpr();
                int ordinal = fieldAccess.getField().getIndex();
                accessOrdinals.push(ordinal);
                iInput += calculateFlattenedOffset(refExp.getType(), ordinal);
                if (refExp instanceof RexInputRef) {
                    // Consecutive field accesses over some input can be removed since by now the input
                    // is flattened (no struct types). We just have to create a new RexInputRef with the
                    // correct ordinal and type.
                    RexInputRef inputRef = (RexInputRef) refExp;
                    final Ord<RelDataType> newField = getNewFieldForOldInput(inputRef.getIndex());
                    iInput += newField.i;
                    return RexBuilder.getRexFactory().makeInputRef(iInput, removeDistinct(newField.e));
                } else if (refExp instanceof RexCorrelVariable) {
                    RelDataType refType = SqlTypeUtil.flattenRecordType(rexBuilder.getTypeFactory(), refExp.getType(), null);
                    refExp = rexBuilder.makeCorrel(refType, ((RexCorrelVariable) refExp).getCorrelationId());
                    return rexBuilder.makeFieldAccess(refExp, iInput);
                } else if (refExp instanceof RexCall) {
                    // Field accesses over calls cannot be simplified since the result of the call may be
                    // a struct type.
                    RexCall call = (RexCall) refExp;
                    RexNode newRefExp = visitCall(call);
                    for (Integer ord : accessOrdinals) {
                        newRefExp = rexBuilder.makeFieldAccess(newRefExp, ord);
                    }
                    return newRefExp;
                } else if (refExp instanceof RexFieldAccess) {
                    fieldAccess = (RexFieldAccess) refExp;
                } else {
                    throw Util.needToImplement(refExp);
                }
            }
        }

        @Override
        public RexNode visitCall(RexCall rexCall) {
            if (rexCall.isA(SqlKind.CAST)) {
                RexNode input = rexCall.getOperands().get(0).accept(this);
                RelDataType targetType = removeDistinct(rexCall.getType());
                return rexBuilder.makeCast(targetType, input);
            }
            if (!rexCall.isA(SqlKind.COMPARISON)) {
                return super.visitCall(rexCall);
            }
            RexNode lhs = rexCall.getOperands().get(0);
            if (!lhs.getType().isStruct()) {
                // NOTE jvs 9-Mar-2005: Calls like IS NULL operate
                // on the representative null indicator. Since it comes
                // first, we don't have to do any special translation.
                return super.visitCall(rexCall);
            }
            // NOTE jvs 22-Mar-2005: Likewise, the null indicator takes
            // care of comparison null semantics without any special casing.
            return flattenComparison(rexBuilder, rexCall.getOperator(), rexCall.getOperands());
        }

        @Override
        public RexNode visitSubQuery(RexSubQuery subQuery) {
            subQuery = (RexSubQuery) super.visitSubQuery(subQuery);
            RelStructuredTypeFlattener flattener = new RelStructuredTypeFlattener(relBuilder, rexBuilder, toRelContext, restructure);
            RelNode rel = flattener.rewrite(subQuery.getRel());
            return subQuery.clone(rel);
        }

        private RexNode flattenComparison(RexBuilder rexBuilder, SqlOperator op, List<RexNode> exprs) {
            final List<Pair<RexNode, String>> flattenedExps = new ArrayList<>();
            flattenProjections(this, exprs, null, "", flattenedExps);
            int n = flattenedExps.size() / 2;
            boolean negate = false;
            if (op.getKind() == SqlKind.NOT_EQUALS) {
                negate = true;
                op = SqlStdOperatorTable.EQUALS;
            }
            if ((n > 1) && op.getKind() != SqlKind.EQUALS) {
                throw Util.needToImplement("inequality comparison for row types");
            }
            RexNode conjunction = null;
            for (int i = 0; i < n; ++i) {
                RexNode comparison = rexBuilder.makeCall(op, flattenedExps.get(i).left, flattenedExps.get(i + n).left);
                if (conjunction == null) {
                    conjunction = comparison;
                } else {
                    conjunction = rexBuilder.makeCall(SqlStdOperatorTable.AND, conjunction, comparison);
                }
            }
            if (negate) {
                return rexBuilder.makeCall(SqlStdOperatorTable.NOT, conjunction);
            } else {
                return conjunction;
            }
        }
    }
}
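
The flattener above leans on RelBuilder.projectNamed to re-attach field names after rewriting. The following stand-alone sketch (not part of the flattener) shows that call in isolation: it pushes a two-column VALUES node and emits a project that swaps the columns while keeping explicit names; force = true retains the project even when it would otherwise be trivial.

import java.util.Arrays;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class ProjectNamedSketch {
    public static void main(String[] args) {
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true))
                .build();
        RelBuilder relBuilder = RelBuilder.create(config);
        relBuilder.values(new String[] {"a", "b"}, 1, 2);
        RexNode first = relBuilder.field("b");
        RexNode second = relBuilder.field("a");
        RelNode rel = relBuilder
                .projectNamed(Arrays.asList(first, second), Arrays.asList("b", "a"), true)
                .build();
        System.out.println(RelOptUtil.toString(rel));
    }
}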

18 Source : AggregateStarTableRule.java
with Apache License 2.0
from lealone

private static AggregateCall rollUp(int groupCount, RelBuilder relBuilder, AggregateCall aggregateCall, TileKey tileKey) {
    if (aggregateCall.isDistinct()) {
        return null;
    }
    final SqlAggFunction aggregation = aggregateCall.getAggregation();
    final Pair<SqlAggFunction, List<Integer>> seek = Pair.of(aggregation, aggregateCall.getArgList());
    final int offset = tileKey.dimensions.cardinality();
    final ImmutableList<Lattice.Measure> measures = tileKey.measures;
    // First, try to satisfy the aggregation by rolling up an aggregate in the
    // materialization.
    final int i = find(measures, seek);
    tryRoll: if (i >= 0) {
        final SqlAggFunction roll = SubstitutionVisitor.getRollup(aggregation);
        if (roll == null) {
            break tryRoll;
        }
        return AggregateCall.create(roll, false, aggregateCall.isApproximate(), ImmutableList.of(offset + i), -1, aggregateCall.collation, groupCount, relBuilder.peek(), null, aggregateCall.name);
    }
    // Second, try to satisfy the aggregation based on group set columns.
    tryGroup: {
        List<Integer> newArgs = new ArrayList<>();
        for (Integer arg : aggregateCall.getArgList()) {
            int z = tileKey.dimensions.indexOf(arg);
            if (z < 0) {
                break tryGroup;
            }
            newArgs.add(z);
        }
        return AggregateCall.create(aggregation, false, aggregateCall.isApproximate(), newArgs, -1, aggregateCall.collation, groupCount, relBuilder.peek(), null, aggregateCall.name);
    }
    // No roll up possible.
    return null;
}

18 Source : AggregateReduceFunctionsRule.java
with Apache License 2.0
from lealone

/**
 * Add a calc with the expressions to compute the original agg calls from the
 * decomposed ones.
 *
 * @param relBuilder Builder of relational expressions; at the top of its
 *                   stack is its input
 * @param rowType The output row type of the original aggregate.
 * @param exprs The expressions to compute the original agg calls.
 */
protected void newCalcRel(RelBuilder relBuilder, RelDataType rowType, List<RexNode> exprs) {
    relBuilder.project(exprs, rowType.getFieldNames());
}
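
newCalcRel simply delegates to RelBuilder.project with explicit output names. As a hedged illustration of that call, independent of the rule, the sketch below projects a computed expression over a VALUES input and names the outputs; SqlStdOperatorTable.PLUS stands in for whatever expressions the rule would generate.

import java.util.Arrays;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class ProjectSketch {
    public static void main(String[] args) {
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true))
                .build();
        RelBuilder b = RelBuilder.create(config);
        RelNode rel = b.values(new String[] {"x"}, 1, 2, 3)
                .project(
                        Arrays.asList(
                                b.field("x"),
                                b.call(SqlStdOperatorTable.PLUS, b.field("x"), b.literal(1))),
                        Arrays.asList("x", "x_plus_one"))
                .build();
        System.out.println(RelOptUtil.toString(rel));
    }
}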

18 Source : AggregateReduceFunctionsRule.java
with Apache License 2.0
from lealone

/**
 * Reduces all calls to AVG, STDDEV_POP, STDDEV_SAMP, VAR_POP, VAR_SAMP in
 * the aggregates list to simpler forms built from SUM and COUNT.
 *
 * <p>It handles newly generated common subexpressions since this was done
 * at the sql2rel stage.
 */
private void reduceAggs(RelOptRuleCall ruleCall, Aggregate oldAggRel) {
    RexBuilder rexBuilder = oldAggRel.getCluster().getRexBuilder();
    List<AggregateCall> oldCalls = oldAggRel.getAggCallList();
    final int groupCount = oldAggRel.getGroupCount();
    final int indicatorCount = oldAggRel.getIndicatorCount();
    final List<AggregateCall> newCalls = new ArrayList<>();
    final Map<AggregateCall, RexNode> aggCallMapping = new HashMap<>();
    final List<RexNode> projList = new ArrayList<>();
    // pass through group key (+ indicators if present)
    for (int i = 0; i < groupCount + indicatorCount; ++i) {
        projList.add(rexBuilder.makeInputRef(getFieldType(oldAggRel, i), i));
    }
    // List of input expressions. If a particular aggregate needs more, it
    // will add an expression to the end, and we will create an extra
    // project.
    final RelBuilder relBuilder = ruleCall.builder();
    relBuilder.push(oldAggRel.getInput());
    final List<RexNode> inputExprs = new ArrayList<>(relBuilder.fields());
    // create new agg function calls and rest of project list together
    for (AggregateCall oldCall : oldCalls) {
        projList.add(reduceAgg(oldAggRel, oldCall, newCalls, aggCallMapping, inputExprs));
    }
    final int extraArgCount = inputExprs.size() - relBuilder.peek().getRowType().getFieldCount();
    if (extraArgCount > 0) {
        relBuilder.project(inputExprs, CompositeList.of(relBuilder.peek().getRowType().getFieldNames(), Collections.nCopies(extraArgCount, null)));
    }
    newAggregateRel(relBuilder, oldAggRel, newCalls);
    newCalcRel(relBuilder, oldAggRel.getRowType(), projList);
    ruleCall.transformTo(relBuilder.build());
}
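
The rule above rebuilds the aggregate through RelBuilder rather than instantiating rel nodes directly. A minimal, unrelated sketch of that aggregate-building pattern, using a VALUES input instead of a real table:

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class AggregateSketch {
    public static void main(String[] args) {
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true))
                .build();
        RelBuilder b = RelBuilder.create(config);
        b.values(new String[] {"deptno", "sal"}, 10, 7000, 10, 8000, 20, 5000);
        // SUM and COUNT are the building blocks the rule reduces AVG and the
        // variance functions to.
        RelNode agg = b.aggregate(
                        b.groupKey("deptno"),
                        b.count(false, "c"),
                        b.sum(false, "s", b.field("sal")))
                .build();
        System.out.println(RelOptUtil.toString(agg));
    }
}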

18 Source : AggregateReduceFunctionsRule.java
with Apache License 2.0
from lealone

/**
 * Do a shallow clone of oldAggRel and update aggCalls. Could be refactored
 * into Aggregate and subclasses - but it's only needed for some
 * subclasses.
 *
 * @param relBuilder Builder of relational expressions; at the top of its
 *                   stack is its input
 * @param oldAggregate LogicalAggregate to clone.
 * @param newCalls  New list of AggregateCalls
 */
protected void newAggregateRel(RelBuilder relBuilder, Aggregate oldAggregate, List<AggregateCall> newCalls) {
    relBuilder.aggregate(relBuilder.groupKey(oldAggregate.getGroupSet(), oldAggregate.getGroupSets()), newCalls);
}

18 Source : MutableRels.java
with Apache License 2.0
from lealone

private static List<RelNode> fromMutables(List<MutableRel> nodes, final RelBuilder relBuilder) {
    return Lists.transform(nodes, mutableRel -> fromMutable(mutableRel, relBuilder));
}

18 Source : CalciteForeignValue.java
with Apache License 2.0
from julianhyde

/**
 * Value based on a Calcite schema.
 *
 * <p>In ML, it appears as a record with a field for each table.
 */
public class CalciteForeignValue implements ForeignValue {

    private final SchemaPlus schema;

    private final boolean lower;

    private final RelBuilder relBuilder;

    /**
     * Creates a CalciteForeignValue.
     */
    public CalciteForeignValue(SchemaPlus schema, boolean lower) {
        this.schema = Objects.requireNonNull(schema);
        this.lower = lower;
        this.relBuilder = RelBuilder.create(Frameworks.newConfigBuilder().defaultSchema(rootSchema(schema)).build());
    }

    private static SchemaPlus rootSchema(SchemaPlus schema) {
        for (; ; ) {
            if (schema.getParentSchema() == null) {
                return schema;
            }
            schema = schema.getParentSchema();
        }
    }

    public Type type(TypeSystem typeSystem) {
        final ImmutableSortedMap.Builder<String, Type> fields = ImmutableSortedMap.orderedBy(RecordType.ORDERING);
        schema.getTableNames().forEach(tableName -> fields.put(convert(tableName), toType(schema.getTable(tableName), typeSystem)));
        return typeSystem.recordType(fields.build());
    }

    private Type toType(Table table, TypeSystem typeSystem) {
        final ImmutableSortedMap.Builder<String, Type> fields = ImmutableSortedMap.orderedBy(RecordType.ORDERING);
        table.getRowType(relBuilder.getTypeFactory()).getFieldList().forEach(field -> fields.put(convert(field.getName()), toType(field).mlType));
        return typeSystem.listType(typeSystem.recordType(fields.build()));
    }

    private String convert(String name) {
        return lower ? name.toLowerCase(Locale.ROOT) : name;
    }

    private FieldConverter toType(RelDataTypeField field) {
        final int ordinal = field.getIndex();
        switch(field.getType().getSqlTypeName()) {
            case BOOLEAN:
                return new FieldConverter(PrimitiveType.BOOL, ordinal) {

                    public Boolean convertFrom(Object[] sourceValues) {
                        return (Boolean) sourceValues[ordinal];
                    }
                };
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
                return new FieldConverter(PrimitiveType.INT, ordinal) {

                    public Integer convertFrom(Object[] sourceValues) {
                        final Number sourceValue = (Number) sourceValues[ordinal];
                        return sourceValue == null ? 0 : sourceValue.intValue();
                    }
                };
            case FLOAT:
            case REAL:
            case DOUBLE:
            case DECIMAL:
                return new FieldConverter(PrimitiveType.REAL, ordinal) {

                    public Float convertFrom(Object[] sourceValues) {
                        final Number sourceValue = (Number) sourceValues[ordinal];
                        return sourceValue == null ? 0f : sourceValue.floatValue();
                    }
                };
            case DATE:
                return new FieldConverter(PrimitiveType.STRING, ordinal) {

                    public String convertFrom(Object[] sourceValues) {
                        final Date sourceValue = (Date) sourceValues[ordinal];
                        return sourceValue == null ? "" : sourceValue.toString();
                    }
                };
            case TIME:
                return new FieldConverter(PrimitiveType.STRING, ordinal) {

                    public String convertFrom(Object[] sourceValues) {
                        final Time sourceValue = (Time) sourceValues[ordinal];
                        return sourceValue == null ? "" : sourceValue.toString();
                    }
                };
            case TIMESTAMP:
                return new FieldConverter(PrimitiveType.STRING, ordinal) {

                    public String convertFrom(Object[] sourceValues) {
                        final Timestamp sourceValue = (Timestamp) sourceValues[ordinal];
                        return sourceValue == null ? "" : sourceValue.toString();
                    }
                };
            case VARCHAR:
            case CHAR:
            default:
                return new FieldConverter(PrimitiveType.STRING, ordinal) {

                    public String convertFrom(Object[] sourceValues) {
                        final String sourceValue = (String) sourceValues[ordinal];
                        return sourceValue == null ? "" : sourceValue;
                    }
                };
        }
    }

    public Object value() {
        final ImmutableList.Builder<List<Object>> fieldValues = ImmutableList.builder();
        final List<String> names = Schemas.path(schema).names();
        schema.getTableNames().forEach(tableName -> fieldValues.add(new RelList(relBuilder.scan(plus(names, tableName)).build(), new EmptyDataContext((JavaTypeFactory) relBuilder.getTypeFactory(), rootSchema(schema)), new Converter(relBuilder.scan(plus(names, tableName)).build().getRowType()))));
        return fieldValues.build();
    }

    /**
     * Returns a copy of a list with one element appended.
     */
    private static <E> List<E> plus(List<E> list, E e) {
        return ImmutableList.<E>builder().addAll(list).add(e).build();
    }

    /**
     * Converts from a Calcite row to an SML record.
     *
     * <p>The Calcite row is represented as an array, ordered by field ordinal;
     * the SML record is represented by a list, ordered by field name
     * (lower-case if {@link #lower}).
     */
    private class Converter implements Function1<Object[], List<Object>> {

        final Object[] tempValues;

        final FieldConverter[] fieldConverters;

        Converter(RelDataType rowType) {
            final List<RelDataTypeField> fields = new ArrayList<>(rowType.getFieldList());
            fields.sort(Comparator.comparing(f -> convert(f.getName())));
            tempValues = new Object[fields.size()];
            fieldConverters = new FieldConverter[fields.size()];
            for (int i = 0; i < fieldConverters.length; i++) {
                fieldConverters[i] = toType(fields.get(i));
            }
        }

        public List<Object> apply(Object[] a) {
            for (int i = 0; i < tempValues.length; i++) {
                tempValues[i] = fieldConverters[i].convertFrom(a);
            }
            return ImmutableNullableList.copyOf(tempValues);
        }
    }

    /**
     * Data context that has no variables.
     */
    private static class EmptyDataContext implements DataContext {

        private final JavaTypeFactory typeFactory;

        private final SchemaPlus rootSchema;

        EmptyDataContext(JavaTypeFactory typeFactory, SchemaPlus rootSchema) {
            this.typeFactory = typeFactory;
            this.rootSchema = rootSchema;
        }

        public SchemaPlus getRootSchema() {
            return rootSchema;
        }

        public JavaTypeFactory getTypeFactory() {
            return typeFactory;
        }

        public QueryProvider getQueryProvider() {
            throw new UnsupportedOperationException();
        }

        public Object get(String name) {
            return null;
        }
    }

    /**
     * Converts a field from Calcite to SML format.
     */
    private abstract static class FieldConverter {

        final Type mlType;

        final int ordinal;

        FieldConverter(Type mlType, int ordinal) {
            this.mlType = mlType;
            this.ordinal = ordinal;
        }

        /**
         * Given a Calcite row, returns the value of this field in SML format.
         */
        public abstract Object convertFrom(Object[] sourceValues);
    }
}
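
CalciteForeignValue obtains its data by calling relBuilder.scan with a qualified table path and reading the resulting row type. A hedged sketch of that part of the flow is below; the schema name "hr" and table name "emps" are assumptions, and the scan only succeeds if the default schema actually contains them.

import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class ScanRowTypeSketch {
    public static void main(String[] args) {
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true)) // assumed to contain hr.emps
                .build();
        RelBuilder relBuilder = RelBuilder.create(config);
        // scan(...) accepts a qualified path, much like the Schemas.path(...)
        // names used by CalciteForeignValue.value().
        RelNode scan = relBuilder.scan("hr", "emps").build();
        RelDataType rowType = scan.getRowType();
        rowType.getFieldList().forEach(f ->
                System.out.println(f.getName() + " : " + f.getType().getSqlTypeName()));
    }
}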

18 Source : ExtendedAggregateExtractProjectRule.java
with Apache License 2.0
from flink-tpc-ds

private List<RelBuilder.AggCall> getNewAggCallList(Aggregate oldAggregate, RelBuilder relBuilder, Mapping mapping) {
    final List<RelBuilder.AggCall> newAggCallList = new ArrayList<>();
    for (AggregateCall aggCall : oldAggregate.getAggCallList()) {
        final RexNode filterArg = aggCall.filterArg < 0 ? null : relBuilder.field(Mappings.apply(mapping, aggCall.filterArg));
        newAggCallList.add(relBuilder.aggregateCall(aggCall.getAggregation(), relBuilder.fields(Mappings.apply2(mapping, aggCall.getArgList()))).distinct(aggCall.isDistinct()).filter(filterArg).approximate(aggCall.isApproximate()).sort(relBuilder.fields(aggCall.collation)).as(aggCall.name));
    }
    return newAggCallList;
}
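
A brief sketch of the fluent AggCall API used above (aggregateCall(...).distinct(...).filter(...).as(...)), built over a VALUES input rather than a real plan; the filter condition here is only illustrative.

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class AggCallSketch {
    public static void main(String[] args) {
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true))
                .build();
        RelBuilder b = RelBuilder.create(config);
        b.values(new String[] {"deptno", "sal"}, 10, 7000, 20, 5000);
        // SUM(sal) FILTER (WHERE sal > 6000) AS total_high_sal
        RelBuilder.AggCall totalHighSal = b.aggregateCall(SqlStdOperatorTable.SUM, b.field("sal"))
                .distinct(false)
                .filter(b.call(SqlStdOperatorTable.GREATER_THAN, b.field("sal"), b.literal(6000)))
                .as("total_high_sal");
        RelNode agg = b.aggregate(b.groupKey("deptno"), totalHighSal).build();
        System.out.println(RelOptUtil.toString(agg));
    }
}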

18 Source : DremioFieldTrimmer.java
with Apache License 2.0
from dremio

public static DremioFieldTrimmer of(RelOptCluster cluster, boolean isRelPlanning) {
    RelBuilder builder = DremioRelFactories.CALCITE_LOGICAL_BUILDER.create(cluster, null);
    return new DremioFieldTrimmer(builder, isRelPlanning);
}

18 Source : DremioRelDecorrelator.java
with Apache License 2.0
from dremio

/**
 * Decorrelates a query.
 *
 * <p>This is the main entry point to {@code DremioRelDecorrelator}.
 *
 * @param rootRel Root node of the query
 * @param forceValueGenerator force value generator to be created when decorrelating filters
 * @param relBuilder        Builder for relational expressions
 *
 * @return Equivalent query with all
 * {@link Correlate} instances removed
 */
public static RelNode decorrelateQuery(RelNode rootRel, RelBuilder relBuilder, boolean forceValueGenerator, boolean isRelPlanning) {
    final CorelMap corelMap = new CorelMapBuilder().build(rootRel);
    if (!corelMap.hasCorrelation()) {
        return rootRel;
    }
    final RelOptCluster cluster = rootRel.getCluster();
    final DremioRelDecorrelator decorrelator = new DremioRelDecorrelator(corelMap, cluster.getPlanner().getContext(), relBuilder, forceValueGenerator, isRelPlanning);
    RelNode newRootRel = decorrelator.removeCorrelationViaRule(rootRel);
    if (!decorrelator.cm.getMapCorToCorRel().isEmpty()) {
        newRootRel = decorrelator.decorrelate(newRootRel);
    }
    return newRootRel;
}
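
For reference, core Calcite exposes a very similar entry point, RelDecorrelator.decorrelateQuery(RelNode, RelBuilder). The sketch below is only a shape-level illustration: rootRel is assumed to come from an earlier sql-to-rel conversion, and the builder is created from the same cluster.

import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.RelFactories;
import org.apache.calcite.sql2rel.RelDecorrelator;
import org.apache.calcite.tools.RelBuilder;

public class DecorrelateSketch {
    // rootRel is assumed to be produced elsewhere (e.g. by SqlToRelConverter).
    static RelNode decorrelate(RelNode rootRel) {
        RelOptCluster cluster = rootRel.getCluster();
        RelBuilder relBuilder = RelFactories.LOGICAL_BUILDER.create(cluster, null);
        return RelDecorrelator.decorrelateQuery(rootRel, relBuilder);
    }
}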

17 Source : SQLRBORewriter.java
with GNU General Public License v3.0
from MyCATApache

@Override
public RelNode visit(LogicalCalc calc) {
    final Pair<ImmutableList<RexNode>, ImmutableList<RexNode>> projectFilter = calc.getProgram().split();
    RelBuilder relBuilder = MycatCalciteSupport.relBuilderFactory.create(calc.getCluster(), null);
    relBuilder.filter(projectFilter.right);
    relBuilder.project(projectFilter.left, calc.getRowType().getFieldNames());
    RelNode relNode = relBuilder.build();
    return relNode.accept(this);
}
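
The rewrite above splits a Calc back into a Filter plus a Project via RelBuilder. A self-contained sketch of that filter-then-project chain, with a VALUES node standing in for the Calc's input and a hand-written predicate instead of the split program:

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class FilterProjectSketch {
    public static void main(String[] args) {
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true))
                .build();
        RelBuilder b = RelBuilder.create(config);
        RelNode rel = b.values(new String[] {"a", "b"}, 1, 10, 2, 20, 3, 30)
                .filter(b.call(SqlStdOperatorTable.GREATER_THAN, b.field("a"), b.literal(1)))
                .project(b.field("b"))
                .build();
        System.out.println(RelOptUtil.toString(rel));
    }
}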

17 Source : SemiJoinProjectTransposeRule.java
with Apache License 2.0
from lealone

// ~ Methods ----------------------------------------------------------------
public void onMatch(RelOptRuleCall call) {
    SemiJoin semiJoin = call.rel(0);
    LogicalProject project = call.rel(1);
    // Convert the LHS semi-join keys to reference the child projection
    // expression; all projection expressions must be RexInputRefs,
    // otherwise, we wouldn't have created this semi-join.
    final List<Integer> newLeftKeys = new ArrayList<>();
    final List<Integer> leftKeys = semiJoin.getLeftKeys();
    final List<RexNode> projExprs = project.getProjects();
    for (int leftKey : leftKeys) {
        RexInputRef inputRef = (RexInputRef) projExprs.get(leftKey);
        newLeftKeys.add(inputRef.getIndex());
    }
    // convert the semijoin condition to reflect the LHS with the project
    // pulled up
    RexNode newCondition = adjustCondition(project, semiJoin);
    SemiJoin newSemiJoin = SemiJoin.create(project.getInput(), semiJoin.getRight(), newCondition, ImmutableIntList.copyOf(newLeftKeys), semiJoin.getRightKeys());
    // Create the new projection.  Note that the projection expressions
    // are the same as the original because they only reference the LHS
    // of the semijoin and the semijoin only projects out the LHS
    final RelBuilder relBuilder = call.builder();
    relBuilder.push(newSemiJoin);
    relBuilder.project(projExprs, project.getRowType().getFieldNames());
    call.transformTo(relBuilder.build());
}
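
The rule rebuilds its output by pushing the new semi-join and projecting on top of it. A generic push-and-join sketch with RelBuilder follows (an ordinary inner join, since SemiJoin construction is rule-internal); field(2, 0, "id") addresses column "id" of the first of the two inputs being joined.

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.JoinRelType;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class JoinSketch {
    public static void main(String[] args) {
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true))
                .build();
        RelBuilder b = RelBuilder.create(config);
        b.values(new String[] {"id", "name"}, 1, "a", 2, "b");
        b.values(new String[] {"id", "qty"}, 1, 100, 3, 300);
        RelNode join = b.join(JoinRelType.INNER,
                        b.call(SqlStdOperatorTable.EQUALS,
                                b.field(2, 0, "id"),
                                b.field(2, 1, "id")))
                .build();
        System.out.println(RelOptUtil.toString(join));
    }
}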

17 Source : TableScan.java
with Apache License 2.0
from lealone

/**
 * Projects a subset of the fields of the table, and also asks for "extra"
 * fields that were not included in the table's official type.
 *
 * <p>The default implementation replacedumes that tables cannot do either of
 * these operations, therefore it adds a {@link Project} that projects
 * {@code NULL} values for the extra fields, using the
 * {@link RelBuilder#project(Iterable)} method.
 *
 * <p>Sub-clreplacedes, representing table types that have these capabilities,
 * should override.</p>
 *
 * @param fieldsUsed  Bitmap of the fields desired by the consumer
 * @param extraFields Extra fields, not advertised in the table's row-type,
 *                    wanted by the consumer
 * @param relBuilder Builder used to create a Project
 * @return Relational expression that projects the desired fields
 */
public RelNode project(ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields, RelBuilder relBuilder) {
    final int fieldCount = getRowType().getFieldCount();
    if (fieldsUsed.equals(ImmutableBitSet.range(fieldCount)) && extraFields.isEmpty()) {
        return this;
    }
    final List<RexNode> exprList = new ArrayList<>();
    final List<String> nameList = new ArrayList<>();
    final RexBuilder rexBuilder = getCluster().getRexBuilder();
    final List<RelDataTypeField> fields = getRowType().getFieldList();
    // Project the subset of fields.
    for (int i : fieldsUsed) {
        RelDataTypeField field = fields.get(i);
        exprList.add(rexBuilder.makeInputRef(this, i));
        nameList.add(field.getName());
    }
    // Project nulls for the extra fields. (Maybe a sub-class table has
    // extra fields, but we don't.)
    for (RelDataTypeField extraField : extraFields) {
        exprList.add(rexBuilder.ensureType(extraField.getType(), rexBuilder.constantNull(), true));
        nameList.add(extraField.getName());
    }
    return relBuilder.push(this).project(exprList, nameList).build();
}
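
A small sketch of the null-padding idea described in the javadoc, written against RelBuilder only (no TableScan subclass involved): project a subset of the input's fields and append a typed NULL column for an "extra" field the input does not provide; the field name "extra" and the INTEGER type are illustrative.

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class NullPaddingProjectSketch {
    public static void main(String[] args) {
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(Frameworks.createRootSchema(true))
                .build();
        RelBuilder b = RelBuilder.create(config);
        RelNode rel = b.values(new String[] {"a", "b"}, 1, 2)
                .project(
                        b.field("a"),
                        b.alias(b.cast(b.literal(null), SqlTypeName.INTEGER), "extra"))
                .build();
        System.out.println(RelOptUtil.toString(rel));
    }
}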

17 Source : ExtendedAggregateExtractProjectRule.java
with Apache License 2.0
from flink-tpc-ds

/**
 * Extract projects from the Aggregate and return the index mapping between the new projects
 * and it's input.
 */
private Mapping extractProjectsAndMapping(Aggregate aggregate, RelNode input, RelBuilder relBuilder) {
    // Compute which input fields are used.
    final ImmutableBitSet.Builder inputFieldsUsed = getInputFieldUsed(aggregate, input);
    final List<RexNode> projects = new ArrayList<>();
    final Mapping mapping = Mappings.create(MappingType.INVERSE_SURJECTION, aggregate.getInput().getRowType().getFieldCount(), inputFieldsUsed.cardinality());
    int j = 0;
    for (int i : inputFieldsUsed.build()) {
        projects.add(relBuilder.field(i));
        mapping.set(i, j++);
    }
    if (input instanceof Project) {
        // this will not create trivial projects
        relBuilder.project(projects);
    } else {
        relBuilder.project(projects, Collections.emptyList(), true);
    }
    return mapping;
}
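
The Mapping returned here is what later rewrites the aggregate's bit sets and call arguments. A small illustration of that follow-up step (mirroring the getNewAggregate(...) method shown further down):

// Remap a group set from old input ordinals to the new, compacted ordinals.
ImmutableBitSet newGroupSet = Mappings.apply(mapping, aggregate.getGroupSet());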

17 Source : MoreRelOptUtil.java
with Apache License 2.0
from dremio

private static RelNode handleAggregate(Aggregate agg, RelBuilder relBuilder) {
    final Set<Integer> newGroupKeySet = new HashSet<>();
    relBuilder.push(agg.getInput());
    // 1. Create an array with the number of group by keys in Aggregate
    // 2. Partially fill out the array with constant fields at the original position in the rowType
    // 3. Add a project below aggregate by reordering the fields in a way that non-constant fields come first
    // 4. Build an aggregate for the remaining fields
    // 5. Fill out the rest of the array by getting fields from constructed aggregate
    // 6. Create a Project with removed constants along with original fields from input
    int numGroupKeys = agg.getGroupSet().cardinality();
    if (numGroupKeys == 0) {
        return null;
    }
    final RexNode[] groupProjects = new RexNode[numGroupKeys];
    final LinkedHashSet<Integer> constantInd = new LinkedHashSet<>();
    final LinkedHashSet<Integer> groupInd = new LinkedHashSet<>();
    final Map<Integer, Integer> mapping = new HashMap<>();
    for (int i = 0; i < agg.getGroupSet().cardinality(); i++) {
        RexLiteral literal = projectedLiteral(agg.getInput(), i);
        if (literal != null) {
            groupProjects[i] = literal;
            constantInd.add(i);
        } else {
            groupProjects[i] = null;
            newGroupKeySet.add(groupInd.size());
            groupInd.add(i);
        }
    }
    final List<RexNode> projectBelowAggregate = new ArrayList<>();
    if (constantInd.size() <= 1 || constantInd.size() == numGroupKeys) {
        return null;
    }
    for (int ind : groupInd) {
        mapping.put(ind, projectBelowAggregate.size());
        projectBelowAggregate.add(relBuilder.field(ind));
    }
    for (int ind : constantInd) {
        mapping.put(ind, projectBelowAggregate.size());
        projectBelowAggregate.add(relBuilder.field(ind));
    }
    for (int i = 0; i < relBuilder.fields().size(); i++) {
        if (!constantInd.contains(i) && !groupInd.contains(i)) {
            mapping.put(i, projectBelowAggregate.size());
            projectBelowAggregate.add(relBuilder.field(i));
        }
    }
    final List<AggregateCall> aggregateCalls = new ArrayList<>();
    for (final AggregateCall aggregateCall : agg.getAggCallList()) {
        List<Integer> newArgList = new ArrayList<>();
        for (final int argIndex : aggregateCall.getArgList()) {
            newArgList.add(mapping.get(argIndex));
        }
        aggregateCalls.add(aggregateCall.copy(newArgList, aggregateCall.filterArg));
    }
    ImmutableBitSet newGroupSet = ImmutableBitSet.of(newGroupKeySet);
    relBuilder.project(projectBelowAggregate).aggregate(relBuilder.groupKey(newGroupSet.toArray()), aggregateCalls);
    int count = 0;
    for (int i = 0; i < groupProjects.length; i++) {
        if (groupProjects[i] == null) {
            groupProjects[i] = relBuilder.field(count);
            count++;
        }
    }
    List<RexNode> projects = new ArrayList<>(Arrays.asList(groupProjects));
    for (int i = 0; i < agg.getAggCallList().size(); i++) {
        projects.add(relBuilder.field(i + newGroupKeySet.size()));
    }
    RelNode newRel = relBuilder.project(projects, agg.getRowType().getFieldNames()).build();
    if (!RelOptUtil.areRowTypesEqual(newRel.getRowType(), agg.getRowType(), true)) {
        return null;
    }
    return newRel;
}
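
For contrast with the constant-pruning logic above, a plain RelBuilder aggregate is much shorter. A sketch assuming a schema with an "EMP" table that has "DEPTNO" and "SAL" columns:

// GROUP BY deptno with SUM(sal), expressed directly on the builder.
RelNode agg = relBuilder
    .scan("EMP")
    .aggregate(relBuilder.groupKey("DEPTNO"),
               relBuilder.sum(false, "TOTAL_SAL", relBuilder.field("SAL")))
    .build();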

16 Source : RelContextProvider.java
with BSD 2-Clause "Simplified" License
from linkedin

/**
 * Calcite needs different objects that are not trivial to create. This class
 * makes creation of the objects required by Calcite easy. These objects
 * are created only once and shared across each call to corresponding getter.
 */
// TODO: Replace this with Google injection framework
public class RelContextProvider {

    private final FrameworkConfig config;

    private final HiveMetastoreClient hiveMetastoreClient;

    private RelBuilder relBuilder;

    private CalciteCatalogReader catalogReader;

    private HiveSqlValidator sqlValidator;

    private RelOptCluster cluster;

    private SqlToRelConverter relConverter;

    private final HiveConvertletTable convertletTable = new HiveConvertletTable();

    private Driver driver;

    // maintain a mutable copy of Hive function registry in order to save some UDF information
    // resolved at run time.  For example, dependencies information.
    private HiveFunctionRegistry registry;

    private ConcurrentHashMap<String, HiveFunction> dynamicRegistry;

    private HiveSchema schema;

    private LocalMetastoreHiveSchema localMetastoreSchema;

    /**
     * Instantiates a new Rel context provider.
     *
     * @param hiveMetastoreClient Hive metastore client to construct Calcite schema
     */
    public RelContextProvider(@Nonnull HiveMetastoreClient hiveMetastoreClient) {
        Preconditions.checkNotNull(hiveMetastoreClient);
        this.hiveMetastoreClient = hiveMetastoreClient;
        this.schema = new HiveSchema(hiveMetastoreClient);
        SchemaPlus schemaPlus = Frameworks.createRootSchema(false);
        schemaPlus.add(HiveSchema.ROOT_SCHEMA, schema);
        this.registry = new StaticHiveFunctionRegistry();
        this.dynamicRegistry = new ConcurrentHashMap<>();
        // this is to ensure that jdbc:calcite driver is correctly registered
        // before initializing framework (which needs it)
        // We don't want each engine to register the driver. It may also not load correctly
        // if the service uses its own service loader (see Presto)
        driver = new Driver();
        config = Frameworks.newConfigBuilder().convertletTable(convertletTable).defaultSchema(schemaPlus).typeSystem(new HiveTypeSystem()).traitDefs((List<RelTraitDef>) null).operatorTable(ChainedSqlOperatorTable.of(SqlStdOperatorTable.instance(), new DaliOperatorTable(this.registry, this.dynamicRegistry))).programs(Programs.ofRules(Programs.RULE_SET)).build();
    }

    /**
     * Instantiates a new Rel context provider.
     *
     * @param localMetaStore in-memory version of Hive metastore client used to construct Calcite schema
     */
    public RelContextProvider(Map<String, Map<String, List<String>>> localMetaStore) {
        this.hiveMetastoreClient = null;
        this.localMetastoreSchema = new LocalMetastoreHiveSchema(localMetaStore);
        SchemaPlus schemaPlus = Frameworks.createRootSchema(false);
        schemaPlus.add(HiveSchema.ROOT_SCHEMA, localMetastoreSchema);
        this.registry = new StaticHiveFunctionRegistry();
        this.dynamicRegistry = new ConcurrentHashMap<>();
        // this is to ensure that jdbc:calcite driver is correctly registered
        // before initializing framework (which needs it)
        // We don't want each engine to register the driver. It may also not load correctly
        // if the service uses its own service loader (see Presto)
        driver = new Driver();
        config = Frameworks.newConfigBuilder().convertletTable(convertletTable).defaultSchema(schemaPlus).typeSystem(new HiveTypeSystem()).traitDefs((List<RelTraitDef>) null).operatorTable(ChainedSqlOperatorTable.of(SqlStdOperatorTable.instance(), new DaliOperatorTable(this.registry, this.dynamicRegistry))).programs(Programs.ofRules(Programs.RULE_SET)).build();
    }

    /**
     * Gets the local copy of HiveFunctionRegistry for current query.
     *
     * @return HiveFunctionRegistry map
     */
    public HiveFunctionRegistry getHiveFunctionRegistry() {
        return this.registry;
    }

    public ConcurrentHashMap<String, HiveFunction> getDynamicHiveFunctionRegistry() {
        return this.dynamicRegistry;
    }

    /**
     * Gets {@link FrameworkConfig} for creation of various objects
     * from Calcite object model
     *
     * @return FrameworkConfig object
     */
    public FrameworkConfig getConfig() {
        return config;
    }

    ParseTreeBuilder.Config getParseTreeBuilderConfig() {
        return new ParseTreeBuilder.Config().setCatalogName(HiveSchema.ROOT_SCHEMA).setDefaultDB(HiveDbSchema.DEFAULT_DB);
    }

    HiveMetastoreClient getHiveMetastoreClient() {
        return hiveMetastoreClient;
    }

    Schema getHiveSchema() {
        return (schema != null) ? this.schema : this.localMetastoreSchema;
    }

    /**
     * Gets {@link RelBuilder} object for generating relational algebra.
     *
     * @return the rel builder
     */
    public RelBuilder getRelBuilder() {
        if (relBuilder == null) {
            // Turn off Rel simplification. Rel simplification can statically interpret boolean conditions in
            // OR, AND, CASE clauses and simplify those. This has two problems:
            // 1. Our type system is not a perfect replication of Hive's, so this can be incorrect
            // 2. Converted expression is harder to validate for correctness (because it appears different from input)
            Hook.REL_BUILDER_SIMPLIFY.add(Hook.propertyJ(false));
            relBuilder = RelBuilder.create(config);
        }
        return relBuilder;
    }

    /**
     * Gets calcite catalog reader.
     *
     * @return the calcite catalog reader
     */
    CalciteCatalogReader getCalciteCatalogReader() {
        CalciteConnectionConfig connectionConfig;
        if (config.getContext() != null) {
            connectionConfig = config.getContext().unwrap(CalciteConnectionConfig.class);
        } else {
            Properties properties = new Properties();
            properties.setProperty(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), String.valueOf(false));
            connectionConfig = new CalciteConnectionConfigImpl(properties);
        }
        if (catalogReader == null) {
            catalogReader = new CalciteCatalogReader(config.getDefaultSchema().unwrap(CalciteSchema.clreplaced), ImmutableList.of(HiveSchema.ROOT_SCHEMA), getRelBuilder().getTypeFactory(), connectionConfig);
        }
        return catalogReader;
    }

    /**
     * Gets hive sql validator.
     *
     * @return the hive sql validator
     */
    HiveSqlValidator getHiveSqlValidator() {
        if (sqlValidator == null) {
            sqlValidator = new HiveSqlValidator(config.getOperatorTable(), getCalciteCatalogReader(), ((JavaTypeFactory) relBuilder.getTypeFactory()), HIVE_SQL);
        }
        return sqlValidator;
    }

    /**
     * Gets rel opt cluster.
     *
     * @return the rel opt cluster
     */
    RelOptCluster getRelOptCluster() {
        if (cluster == null) {
            cluster = RelOptCluster.create(new VolcanoPlanner(), getRelBuilder().getRexBuilder());
        }
        return cluster;
    }

    HiveViewExpander getViewExpander() {
        // we don't need to cache this...Okay to re-create each time
        return new HiveViewExpander(this);
    }

    /**
     * Gets sql to rel converter.
     *
     * @return the sql to rel converter
     */
    SqlToRelConverter getSqlToRelConverter() {
        if (relConverter == null) {
            relConverter = new HiveSqlToRelConverter(getViewExpander(), getHiveSqlValidator(), getCalciteCatalogReader(), getRelOptCluster(), convertletTable, SqlToRelConverter.configBuilder().build());
        }
        return relConverter;
    }
}
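
Outside of a Hive-aware setup like the one above, the wiring needed to obtain a RelBuilder is much smaller. A generic sketch (the variable names are illustrative, not part of this class):

// Build a bare FrameworkConfig over an empty root schema and create a builder.
SchemaPlus rootSchema = Frameworks.createRootSchema(true);
FrameworkConfig frameworkConfig = Frameworks.newConfigBuilder()
    .defaultSchema(rootSchema)
    .build();
RelBuilder standaloneBuilder = RelBuilder.create(frameworkConfig);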

16 Source : UnionToDistinctRule.java
with Apache License 2.0
from lealone

// ~ Methods ----------------------------------------------------------------
public void onMatch(RelOptRuleCall call) {
    final Union union = call.rel(0);
    if (union.all) {
        // nothing to do
        return;
    }
    final RelBuilder relBuilder = call.builder();
    relBuilder.pushAll(union.getInputs());
    relBuilder.union(true, union.getInputs().size());
    relBuilder.distinct();
    call.transformTo(relBuilder.build());
}
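
The same rewrite can be reproduced by hand on a standalone builder. A sketch assuming two already-built inputs, left and right, with identical row types:

// UNION (without ALL) is UNION ALL followed by DISTINCT.
RelNode rewritten = relBuilder
    .pushAll(ImmutableList.of(left, right))
    .union(true, 2)
    .distinct()
    .build();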

16 Source : JoinCommuteRule.java
with Apache License 2.0
from lealone

@Override
public void onMatch(final RelOptRuleCall call) {
    Join join = call.rel(0);
    if (!join.getSystemFieldList().isEmpty()) {
        // FIXME Enable this rule for joins with system fields
        return;
    }
    final RelNode swapped = swap(join, this.swapOuter, call.builder());
    if (swapped == null) {
        return;
    }
    // The result is either a Project or, if the project is trivial, a
    // raw Join.
    final Join newJoin = swapped instanceof Join ? (Join) swapped : (Join) swapped.getInput(0);
    call.transformTo(swapped);
    // We have converted join='a join b' into swapped='select
    // a0,a1,a2,b0,b1 from b join a'. Now register that project='select
    // b0,b1,a0,a1,a2 from (select a0,a1,a2,b0,b1 from b join a)' is the
    // same as 'b join a'. If we didn't do this, the swap join rule
    // would fire on the new join, ad infinitum.
    final RelBuilder relBuilder = call.builder();
    final List<RexNode> exps = RelOptUtil.createSwappedJoinExprs(newJoin, join, false);
    relBuilder.push(swapped).project(exps, newJoin.getRowType().getFieldNames());
    call.getPlanner().ensureRegistered(relBuilder.build(), newJoin);
}

16 Source : FlinkSemiAntiJoinProjectTransposeRule.java
with Apache License 2.0
from flink-tpc-ds

// ~ Methods ----------------------------------------------------------------
public void onMatch(RelOptRuleCall call) {
    LogicalJoin join = call.rel(0);
    LogicalProject project = call.rel(1);
    // convert the semi/anti join condition to reflect the LHS with the project
    // pulled up
    RexNode newCondition = adjustCondition(project, join);
    Join newJoin = LogicalJoin.create(project.getInput(), join.getRight(), newCondition, join.getVariablesSet(), join.getJoinType());
    // Create the new projection. Note that the projection expressions
    // are the same as the original because they only reference the LHS
    // of the semi/anti join and the semi/anti join only projects out the LHS
    final RelBuilder relBuilder = call.builder();
    relBuilder.push(newJoin);
    relBuilder.project(project.getProjects(), project.getRowType().getFieldNames());
    call.transformTo(relBuilder.build());
}

16 Source : FlinkAggregateRemoveRule.java
with Apache License 2.0
from flink-tpc-ds

// ~ Methods ----------------------------------------------------------------
public void onMatch(RelOptRuleCall call) {
    final Aggregate aggregate = call.rel(0);
    final RelNode input = call.rel(1);
    // Distinct is "GROUP BY c1, c2" (where c1, c2 are a set of columns on
    // which the input is unique, i.e. contain a key) and has no aggregate
    // functions or the functions we enumerated. It can be removed.
    final RelNode newInput = convert(input, aggregate.getTraitSet().simplify());
    // If aggregate was projecting a subset of columns, add a project for the
    // same effect.
    final RelBuilder relBuilder = call.builder();
    relBuilder.push(newInput);
    List<Integer> projectIndices = new ArrayList<>(aggregate.getGroupSet().asList());
    for (AggregateCall aggCall : aggregate.getAggCallList()) {
        projectIndices.addAll(aggCall.getArgList());
    }
    relBuilder.project(relBuilder.fields(projectIndices));
    // Create a project if some of the columns have become
    // NOT NULL due to aggregate functions are removed
    relBuilder.convert(aggregate.getRowType(), true);
    call.transformTo(relBuilder.build());
}
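
The fields(...) call above resolves a list of input ordinals against the relation on top of the builder's stack. In isolation the idiom looks like this (the ordinals and savedRowType are illustrative assumptions):

// Project input columns 0, 2 and 3 of whatever is on top of the stack, then
// cast the result back to a previously captured row type, renaming fields.
relBuilder.project(relBuilder.fields(ImmutableList.of(0, 2, 3)));
relBuilder.convert(savedRowType, true);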

16 Source : ExtendedAggregateExtractProjectRule.java
with Apache License 2.0
from flink-tpc-ds

private RelNode getNewAggregate(Aggregate oldAggregate, RelBuilder relBuilder, Mapping mapping) {
    final ImmutableBitSet newGroupSet = Mappings.apply(mapping, oldAggregate.getGroupSet());
    final Iterable<ImmutableBitSet> newGroupSets = oldAggregate.getGroupSets().stream().map(bitSet -> Mappings.apply(mapping, bitSet)).collect(Collectors.toList());
    final List<RelBuilder.AggCall> newAggCallList = getNewAggCallList(oldAggregate, relBuilder, mapping);
    final RelBuilder.GroupKey groupKey = relBuilder.groupKey(newGroupSet, newGroupSets);
    if (oldAggregate instanceof LogicalWindowAggregate) {
        if (newGroupSet.size() == 0 && newAggCallList.size() == 0) {
            // Return the old LogicalWindowAggregate directly, as we can't get an empty Aggregate
            // from the relBuilder.
            return oldAggregate;
        } else {
            relBuilder.aggregate(groupKey, newAggCallList);
            Aggregate newAggregate = (Aggregate) relBuilder.build();
            LogicalWindowAggregate oldLogicalWindowAggregate = (LogicalWindowAggregate) oldAggregate;
            return LogicalWindowAggregate.create(oldLogicalWindowAggregate.getWindow(), oldLogicalWindowAggregate.getNamedProperties(), newAggregate);
        }
    } else {
        relBuilder.aggregate(groupKey, newAggCallList);
        return relBuilder.build();
    }
}
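
The two-argument groupKey overload used here is what carries grouping sets through the rewrite. A small illustration with hard-coded ordinals, purely for exposition:

// GROUP BY (0, 1) with GROUPING SETS ((0, 1), (0), ()).
RelBuilder.GroupKey key = relBuilder.groupKey(
    ImmutableBitSet.of(0, 1),
    ImmutableList.of(ImmutableBitSet.of(0, 1), ImmutableBitSet.of(0), ImmutableBitSet.of()));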

16 Source : TestRelDataTypeSerializer.java
with Apache License 2.0
from dremio

public class TestRelDataTypeSerializer extends PlanTestBase {

    private RelOptCluster cluster;

    private RelDataTypeFactory typeFactory;

    private RelBuilder relBuilder;

    private LogicalPlanSerializer serializer;

    private LogicalPlanDeserializer deserializer;

    @Before
    public void setup() {
        final VolcanoPlanner planner = new VolcanoPlanner();
        typeFactory = SqlTypeFactoryImpl.INSTANCE;
        cluster = RelOptCluster.create(planner, new RexBuilder(typeFactory));
        relBuilder = RelFactories.LOGICAL_BUILDER.create(cluster, null);
        KryoRelSerializerFactory kryoRelSerializerFactory = new KryoRelSerializerFactory(null);
        serializer = kryoRelSerializerFactory.getSerializer(cluster);
        DremioCatalogReader catalogReader = Mockito.mock(DremioCatalogReader.class);
        deserializer = kryoRelSerializerFactory.getDeserializer(cluster, catalogReader, null);
    }

    @Test
    public void testRelDataTypeSerializer() {
        String col1 = UUID.randomUUID().toString();
        String col2 = UUID.randomUUID().toString();
        List<RelDataTypeField> fields = new ArrayList<>();
        RelDataTypeField field0 = new RelDataTypeFieldImpl(col1, 0, typeFactory.createSqlType(SqlTypeName.INTEGER));
        RelDataTypeField field1 = new RelDataTypeFieldImpl(col2, 1, typeFactory.createSqlType(SqlTypeName.VARCHAR));
        fields.add(field0);
        fields.add(field1);
        // Create a nullable struct type without using type factory cache
        final RelDataType nullableRecordType = new RelRecordType(StructKind.FULLY_QUALIFIED, fields, true);
        Assert.assertTrue(nullableRecordType.isNullable());
        // Serde through Kryo serializer and double check nullability
        RelNode nullableRel = relBuilder.values(nullableRecordType).build();
        byte[] nullableSerialized = serializer.serializeToBytes(nullableRel);
        RelNode deserializedNullableRel = deserializer.deserialize(nullableSerialized);
        Assert.assertTrue(deserializedNullableRel.getRowType().isNullable());
    }
}

16 Source : FilterProjectNLJRule.java
with Apache License 2.0
from dremio

@Override
public void onMatch(RelOptRuleCall call) {
    FilterPrel filter = call.rel(0);
    ProjectPrel project = call.rel(1);
    NestedLoopJoinPrel join = call.rel(2);
    RexNode newCondition = RelOptUtil.pushPastProject(filter.getCondition(), project);
    final RelBuilder relBuilder = call.builder();
    RelNode newFilterRel = filter.copy(filter.getTraitSet(), NestedLoopJoinPrel.create(join.getCluster(), join.getTraitSet(), join.getLeft(), join.getRight(), join.getJoinType(), join.getCondition(), join.getProjectedFields()), RexUtil.removeNullabilityCast(relBuilder.getTypeFactory(), newCondition));
    RelNode newProjRel = project.copy(project.getTraitSet(), newFilterRel, project.getProjects(), project.getRowType());
    call.transformTo(newProjRel);
}

16 Source : JoinNormalizationRule.java
with Apache License 2.0
from dremio

/**
 * Normalizes the join relational operator.
 *
 * Normalizes the join so that conditions are fully pushed down and, if
 * possible, any remaining condition is extracted into a separate filter.
 *
 * @param join the join to normalize
 * @return a new tree of operators containing the normalized join, or {@code join} if
 * already normalized
 */
public RelNode normalize(Join join) {
    final RelBuilder builder = factory.create(join.getCluster(), null);
    RelNode newJoin = RelOptUtil.pushDownJoinConditions(join, builder);
    // If the join is the same, reset to the original join so we can bail out later
    if (newJoin instanceof Join) {
        final RexNode newCondition = ((Join) newJoin).getCondition();
        if (join.getCondition().toString().equals(newCondition.toString())) {
            newJoin = join;
        }
    }
    // newJoin might be a join, or might be a join below a project.
    // Need to visit the tree to find the first join and extract the remaining
    // condition in a separate filter
    newJoin = newJoin.accept(new RelShuttleImpl() {

        @Override
        public RelNode visit(RelNode other) {
            if (!(other instanceof Join)) {
                return super.visit(other);
            }
            Join join = (Join) other;
            return getNewJoinCondition(builder, join);
        }
    });
    return newJoin;
}
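
A hedged sketch of how a planner rule might drive this helper; the join and call variables come from a hypothetical RelOptRuleCall and are not part of the method above:

// Only fire a transformation when normalization actually changed the join.
RelNode normalized = normalize(join);
if (normalized != join) {
    call.transformTo(normalized);
}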

15 Source : SubQueryRemoveRule.java
with Apache License 2.0
from lealone

/**
 * Rewrites a scalar sub-query into an
 * {@link org.apache.calcite.rel.core.Aggregate}.
 *
 * @param e            Scalar sub-query to rewrite
 * @param variablesSet A set of variables used by a relational
 *                     expression of the specified RexSubQuery
 * @param builder      Builder
 * @param offset       Offset to shift {@link RexInputRef}
 *
 * @return Expression that may be used to replace the RexSubQuery
 */
private RexNode rewriteScalarQuery(RexSubQuery e, Set<CorrelationId> variablesSet, RelBuilder builder, int inputCount, int offset) {
    builder.push(e.getRel());
    final RelMetadataQuery mq = e.getRel().getCluster().getMetadataQuery();
    final Boolean unique = mq.areColumnsUnique(builder.peek(), ImmutableBitSet.of());
    if (unique == null || !unique) {
        builder.aggregate(builder.groupKey(), builder.aggregateCall(SqlStdOperatorTable.SINGLE_VALUE, builder.field(0)));
    }
    builder.join(JoinRelType.LEFT, builder.literal(true), variablesSet);
    return field(builder, inputCount, offset);
}

15 Source : PushProjector.java
with Apache License 2.0
from lealone

/**
 * PushProjector is a utility class used to perform operations used in push
 * projection rules.
 *
 * <p>Pushing is particularly interesting in the case of join, because there
 * are multiple inputs. Generally an expression can be pushed down to a
 * particular input if it depends upon no other inputs. If it can be pushed
 * down to both sides, it is pushed down to the left.
 *
 * <p>Sometimes an expression needs to be split before it can be pushed down.
 * To flag that an expression cannot be split, specify a rule that it must be
 * <dfn>preserved</dfn>. Such an expression will be pushed down intact to one
 * of the inputs, or not pushed down at all.</p>
 */
public class PushProjector {

    // ~ Instance fields --------------------------------------------------------
    private final Project origProj;

    private final RexNode origFilter;

    private final RelNode childRel;

    private final ExprCondition preserveExprCondition;

    private final RelBuilder relBuilder;

    /**
     * Original projection expressions
     */
    final List<RexNode> origProjExprs;

    /**
     * Fields from the RelNode that the projection is being pushed past
     */
    final List<RelDataTypeField> childFields;

    /**
     * Number of fields in the RelNode that the projection is being pushed past
     */
    final int nChildFields;

    /**
     * Bitmap containing the references in the original projection
     */
    final BitSet projRefs;

    /**
     * Bitmap containing the fields in the RelNode that the projection is being
     * pushed past, if the RelNode is not a join. If the RelNode is a join, then
     * the fields correspond to the left hand side of the join.
     */
    final ImmutableBitSet childBitmap;

    /**
     * Bitmap containing the fields in the right hand side of a join, in the
     * case where the projection is being pushed past a join. Not used
     * otherwise.
     */
    final ImmutableBitSet rightBitmap;

    /**
     * Bitmap containing the fields that should be strong, i.e. when preserving expressions
     * we can only preserve them if the expression is null when these fields are null.
     */
    final ImmutableBitSet strongBitmap;

    /**
     * Number of fields in the RelNode that the projection is being pushed past,
     * if the RelNode is not a join. If the RelNode is a join, then this is the
     * number of fields in the left hand side of the join.
     *
     * <p>The identity
     * {@code nChildFields == nSysFields + nFields + nFieldsRight}
     * holds. {@code nFields} does not include {@code nSysFields}.
     * The output of a join looks like this:
     *
     * <blockquote><pre>
     * | nSysFields | nFields | nFieldsRight |
     * </pre></blockquote>
     *
     * <p>The output of a single-input rel looks like this:
     *
     * <blockquote><pre>
     * | nSysFields | nFields |
     * </pre></blockquote>
     */
    final int nFields;

    /**
     * Number of fields in the right hand side of a join, in the case where the
     * projection is being pushed past a join. Always 0 otherwise.
     */
    final int nFieldsRight;

    /**
     * Number of system fields. System fields appear at the start of a join,
     * before the first field from the left input.
     */
    private final int nSysFields;

    /**
     * Expressions referenced in the projection/filter that should be preserved.
     * In the case where the projection is being pushed past a join, then the
     * list only contains the expressions corresponding to the left hand side of
     * the join.
     */
    final List<RexNode> childPreserveExprs;

    /**
     * Expressions referenced in the projection/filter that should be preserved,
     * corresponding to expressions on the right hand side of the join, if the
     * projection is being pushed past a join. Empty list otherwise.
     */
    final List<RexNode> rightPreserveExprs;

    /**
     * Number of system fields being projected.
     */
    int nSystemProject;

    /**
     * Number of fields being projected. In the case where the projection is
     * being pushed past a join, the number of fields being projected from the
     * left hand side of the join.
     */
    int nProject;

    /**
     * Number of fields being projected from the right hand side of a join, in
     * the case where the projection is being pushed past a join. 0 otherwise.
     */
    int nRightProject;

    /**
     * Rex builder used to create new expressions.
     */
    final RexBuilder rexBuilder;

    // ~ Constructors -----------------------------------------------------------
    /**
     * Creates a PushProjector object for pushing projects past a RelNode.
     *
     * @param origProj              the original projection that is being pushed;
     *                              may be null if the projection is implied as a
     *                              result of a projection having been trivially
     *                              removed
     * @param origFilter            the filter that the projection must also be
     *                              pushed past, if applicable
     * @param childRel              the RelNode that the projection is being
     *                              pushed past
     * @param preserveExprCondition condition for whether an expression should
     *                              be preserved in the projection
     */
    public PushProjector(Project origProj, RexNode origFilter, RelNode childRel, ExprCondition preserveExprCondition, RelBuilder relBuilder) {
        this.origProj = origProj;
        this.origFilter = origFilter;
        this.childRel = childRel;
        this.preserveExprCondition = preserveExprCondition;
        this.relBuilder = Objects.requireNonNull(relBuilder);
        if (origProj == null) {
            origProjExprs = ImmutableList.of();
        } else {
            origProjExprs = origProj.getProjects();
        }
        childFields = childRel.getRowType().getFieldList();
        nChildFields = childFields.size();
        projRefs = new BitSet(nChildFields);
        if (childRel instanceof Join) {
            Join joinRel = (Join) childRel;
            List<RelDataTypeField> leftFields = joinRel.getLeft().getRowType().getFieldList();
            List<RelDataTypeField> rightFields = joinRel.getRight().getRowType().getFieldList();
            nFields = leftFields.size();
            nFieldsRight = childRel instanceof SemiJoin ? 0 : rightFields.size();
            nSysFields = joinRel.getSystemFieldList().size();
            childBitmap = ImmutableBitSet.range(nSysFields, nFields + nSysFields);
            rightBitmap = ImmutableBitSet.range(nFields + nSysFields, nChildFields);
            switch(joinRel.getJoinType()) {
                case INNER:
                    strongBitmap = ImmutableBitSet.of();
                    break;
                case // All the left-input's columns must be strong
                RIGHT:
                    strongBitmap = ImmutableBitSet.range(nSysFields, nFields + nSysFields);
                    break;
                case // All the right-input's columns must be strong
                LEFT:
                    strongBitmap = ImmutableBitSet.range(nFields + nSysFields, nChildFields);
                    break;
                case FULL:
                default:
                    strongBitmap = ImmutableBitSet.range(nSysFields, nChildFields);
            }
        } else if (childRel instanceof Correlate) {
            Correlate corrRel = (Correlate) childRel;
            List<RelDataTypeField> leftFields = corrRel.getLeft().getRowType().getFieldList();
            List<RelDataTypeField> rightFields = corrRel.getRight().getRowType().getFieldList();
            nFields = leftFields.size();
            SemiJoinType joinType = corrRel.getJoinType();
            switch(joinType) {
                case SEMI:
                case ANTI:
                    nFieldsRight = 0;
                    break;
                default:
                    nFieldsRight = rightFields.size();
            }
            nSysFields = 0;
            childBitmap = ImmutableBitSet.range(0, nFields);
            rightBitmap = ImmutableBitSet.range(nFields, nChildFields);
            // Required columns need to be included in project
            projRefs.or(BitSets.of(corrRel.getRequiredColumns()));
            switch(joinType) {
                case INNER:
                    strongBitmap = ImmutableBitSet.of();
                    break;
                case ANTI:
                case // All the left-input's columns must be strong
                SEMI:
                    strongBitmap = ImmutableBitSet.range(0, nFields);
                    break;
                case // All the right-input's columns must be strong
                LEFT:
                    strongBitmap = ImmutableBitSet.range(nFields, nChildFields);
                    break;
                default:
                    strongBitmap = ImmutableBitSet.range(0, nChildFields);
            }
        } else {
            nFields = nChildFields;
            nFieldsRight = 0;
            childBitmap = ImmutableBitSet.range(nChildFields);
            rightBitmap = null;
            nSysFields = 0;
            strongBitmap = ImmutableBitSet.of();
        }
        assert nChildFields == nSysFields + nFields + nFieldsRight;
        childPreserveExprs = new ArrayList<>();
        rightPreserveExprs = new ArrayList<>();
        rexBuilder = childRel.getCluster().getRexBuilder();
    }

    // ~ Methods ----------------------------------------------------------------
    /**
     * Decomposes a projection to the input references referenced by a
     * projection and a filter, either of which is optional. If both are
     * provided, the filter is underneath the project.
     *
     * <p>Creates a projection containing all input references as well as
     * preserving any special expressions. Converts the original projection
     * and/or filter to reference the new projection. Then, finally puts on top,
     * a final projection corresponding to the original projection.
     *
     * @param defaultExpr expression to be used in the projection if no fields
     *                    or special columns are selected
     * @return the converted projection if it makes sense to push elements of
     * the projection; otherwise returns null
     */
    public RelNode convertProject(RexNode defaultExpr) {
        // locate all fields referenced in the projection and filter
        locateAllRefs();
        // if all columns are being selected (either explicitly in the
        // projection) or via a "select *", then there needs to be some
        // special expressions to preserve in the projection; otherwise,
        // there's no point in proceeding any further
        if (origProj == null) {
            if (childPreserveExprs.size() == 0) {
                return null;
            }
            // even though there is no projection, this is the same as
            // selecting all fields
            if (nChildFields > 0) {
                // Calling with nChildFields == 0 should be safe but hits
                // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6222207
                projRefs.set(0, nChildFields);
            }
            nProject = nChildFields;
        } else if ((projRefs.cardinality() == nChildFields) && (childPreserveExprs.size() == 0)) {
            return null;
        }
        // if nothing is being selected from the underlying rel, just
        // project the default expression passed in as a parameter or the
        // first column if there is no default expression
        if ((projRefs.cardinality() == 0) && (childPreserveExprs.size() == 0)) {
            if (defaultExpr != null) {
                childPreserveExprs.add(defaultExpr);
            } else if (nChildFields == 1) {
                return null;
            } else {
                projRefs.set(0);
                nProject = 1;
            }
        }
        // create a new projection referencing all fields referenced in
        // either the project or the filter
        RelNode newProject = createProjectRefsAndExprs(childRel, false, false);
        int[] adjustments = getAdjustments();
        // if a filter was passed in, convert it to reference the projected
        // columns, placing it on top of the project just created
        RelNode projChild;
        if (origFilter != null) {
            RexNode newFilter = convertRefsAndExprs(origFilter, newProject.getRowType().getFieldList(), adjustments);
            relBuilder.push(newProject);
            relBuilder.filter(newFilter);
            projChild = relBuilder.build();
        } else {
            projChild = newProject;
        }
        // put the original project on top of the filter/project, converting
        // it to reference the modified projection list; otherwise, create
        // a projection that essentially selects all fields
        return createNewProject(projChild, adjustments);
    }

    /**
     * Locates all references found in either the projection expressions or a
     * filter, as well as references to expressions that should be preserved.
     * Based on that, determines whether pushing the projection makes sense.
     *
     * @return true if all inputs from the child that the projection is being
     * pushed past are referenced in the projection/filter and no special
     * preserve expressions are referenced; in that case, it does not make sense
     * to push the projection
     */
    public boolean locateAllRefs() {
        RexUtil.apply(new InputSpecialOpFinder(projRefs, childBitmap, rightBitmap, strongBitmap, preserveExprCondition, childPreserveExprs, rightPreserveExprs), origProjExprs, origFilter);
        // The system fields of each child are always used by the join, even if
        // they are not projected out of it.
        projRefs.set(nSysFields, nSysFields + nSysFields, true);
        projRefs.set(nSysFields + nFields, nSysFields + nFields + nSysFields, true);
        // Count how many fields are projected.
        nSystemProject = 0;
        nProject = 0;
        nRightProject = 0;
        for (int bit : BitSets.toIter(projRefs)) {
            if (bit < nSysFields) {
                nSystemProject++;
            } else if (bit < nSysFields + nFields) {
                nProject++;
            } else {
                nRightProject++;
            }
        }
        assert nSystemProject + nProject + nRightProject == projRefs.cardinality();
        if ((childRel instanceof Join) || (childRel instanceof SetOp)) {
            // if nothing is projected from the children, arbitrarily project
            // the first columns; this is necessary since Fennel doesn't
            // handle 0-column projections
            if ((nProject == 0) && (childPreserveExprs.size() == 0)) {
                projRefs.set(0);
                nProject = 1;
            }
            if (childRel instanceof Join) {
                if ((nRightProject == 0) && (rightPreserveExprs.size() == 0)) {
                    projRefs.set(nFields);
                    nRightProject = 1;
                }
            }
        }
        // no need to push projections if all children fields are being
        // referenced and there are no special preserve expressions; note
        // that we need to do this check after we've handled the 0-column
        // project cases
        if (projRefs.cardinality() == nChildFields && childPreserveExprs.size() == 0 && rightPreserveExprs.size() == 0) {
            return true;
        }
        return false;
    }

    /**
     * Creates a projection based on the inputs specified in a bitmap and the
     * expressions that need to be preserved. The expressions are appended after
     * the input references.
     *
     * @param projChild child that the projection will be created on top of
     * @param adjust    if true, need to create new projection expressions;
     *                  otherwise, the existing ones are reused
     * @param rightSide if true, creating a projection for the right hand side
     *                  of a join
     * @return created projection
     */
    public Project createProjectRefsAndExprs(RelNode projChild, boolean adjust, boolean rightSide) {
        List<RexNode> preserveExprs;
        int nInputRefs;
        int offset;
        if (rightSide) {
            preserveExprs = rightPreserveExprs;
            nInputRefs = nRightProject;
            offset = nSysFields + nFields;
        } else {
            preserveExprs = childPreserveExprs;
            nInputRefs = nProject;
            offset = nSysFields;
        }
        int refIdx = offset - 1;
        List<Pair<RexNode, String>> newProjects = new ArrayList<>();
        List<RelDataTypeField> destFields = projChild.getRowType().getFieldList();
        // add on the input references
        for (int i = 0; i < nInputRefs; i++) {
            refIdx = projRefs.nextSetBit(refIdx + 1);
            assert refIdx >= 0;
            final RelDataTypeField destField = destFields.get(refIdx - offset);
            newProjects.add(Pair.of((RexNode) rexBuilder.makeInputRef(destField.getType(), refIdx - offset), destField.getName()));
        }
        // add on the expressions that need to be preserved, converting the
        // arguments to reference the projected columns (if necessary)
        int[] adjustments = {};
        if ((preserveExprs.size() > 0) && adjust) {
            adjustments = new int[childFields.size()];
            for (int idx = offset; idx < childFields.size(); idx++) {
                adjustments[idx] = -offset;
            }
        }
        for (RexNode projExpr : preserveExprs) {
            RexNode newExpr;
            if (adjust) {
                newExpr = projExpr.accept(new RelOptUtil.RexInputConverter(rexBuilder, childFields, destFields, adjustments));
            } else {
                newExpr = projExpr;
            }
            newProjects.add(Pair.of(newExpr, ((RexCall) projExpr).getOperator().getName()));
        }
        return (Project) relBuilder.push(projChild).projectNamed(Pair.left(newProjects), Pair.right(newProjects), true).build();
    }

    /**
     * Determines how much each input reference needs to be adjusted as a result
     * of projection
     *
     * @return array indicating how much each input needs to be adjusted by
     */
    public int[] getAdjustments() {
        int[] adjustments = new int[nChildFields];
        int newIdx = 0;
        int rightOffset = childPreserveExprs.size();
        for (int pos : BitSets.toIter(projRefs)) {
            adjustments[pos] = -(pos - newIdx);
            if (pos >= nSysFields + nFields) {
                adjustments[pos] += rightOffset;
            }
            newIdx++;
        }
        return adjustments;
    }

    /**
     * Clones an expression tree and walks through it, adjusting each
     * RexInputRef index by some amount, and converting expressions that need to
     * be preserved to field references.
     *
     * @param rex         the expression
     * @param destFields  fields that the new expressions will be referencing
     * @param adjustments the amount each input reference index needs to be
     *                    adjusted by
     * @return modified expression tree
     */
    public RexNode convertRefsAndExprs(RexNode rex, List<RelDataTypeField> destFields, int[] adjustments) {
        return rex.accept(new RefAndExprConverter(rexBuilder, childFields, destFields, adjustments, childPreserveExprs, nProject, rightPreserveExprs, nProject + childPreserveExprs.size() + nRightProject));
    }

    /**
     * Creates a new projection based on the original projection, adjusting all
     * input refs using an adjustment array passed in. If there was no original
     * projection, create a new one that selects every field from the underlying
     * rel.
     *
     * <p>If the resulting projection would be trivial, return the child.
     *
     * @param projChild   child of the new project
     * @param adjustments array indicating how much each input reference should
     *                    be adjusted by
     * @return the created projection
     */
    public RelNode createNewProject(RelNode projChild, int[] adjustments) {
        final List<Pair<RexNode, String>> projects = new ArrayList<>();
        if (origProj != null) {
            for (Pair<RexNode, String> p : origProj.getNamedProjects()) {
                projects.add(Pair.of(convertRefsAndExprs(p.left, projChild.getRowType().getFieldList(), adjustments), p.right));
            }
        } else {
            for (Ord<RelDataTypeField> field : Ord.zip(childFields)) {
                projects.add(Pair.of((RexNode) rexBuilder.makeInputRef(field.e.getType(), field.i), field.e.getName()));
            }
        }
        return relBuilder.push(projChild).project(Pair.left(projects), Pair.right(projects)).build();
    }

    // ~ Inner Classes ----------------------------------------------------------
    /**
     * Visitor which builds a bitmap of the inputs used by an expressions, as
     * well as locating expressions corresponding to special operators.
     */
    private class InputSpecialOpFinder extends RexVisitorImpl<Void> {

        private final BitSet rexRefs;

        private final ImmutableBitSet leftFields;

        private final ImmutableBitSet rightFields;

        private final ImmutableBitSet strongFields;

        private final ExprCondition preserveExprCondition;

        private final List<RexNode> preserveLeft;

        private final List<RexNode> preserveRight;

        private final Strong strong;

        InputSpecialOpFinder(BitSet rexRefs, ImmutableBitSet leftFields, ImmutableBitSet rightFields, final ImmutableBitSet strongFields, ExprCondition preserveExprCondition, List<RexNode> preserveLeft, List<RexNode> preserveRight) {
            super(true);
            this.rexRefs = rexRefs;
            this.leftFields = leftFields;
            this.rightFields = rightFields;
            this.preserveExprCondition = preserveExprCondition;
            this.preserveLeft = preserveLeft;
            this.preserveRight = preserveRight;
            this.strongFields = strongFields;
            this.strong = Strong.of(strongFields);
        }

        public Void visitCall(RexCall call) {
            if (preserve(call)) {
                return null;
            }
            super.visitCall(call);
            return null;
        }

        private boolean isStrong(final ImmutableBitSet exprArgs, final RexNode call) {
            // If the expressions do not use any of the inputs that require output to be null,
            // no need to check.  Otherwise, check that the expression is null.
            // For example, in a "left outer join", we don't require expressions
            // pushed down into the left input to be strong.  On the other hand,
            // expressions pushed into the right input must be.  In that case,
            // strongFields == right input fields.
            return !strongFields.intersects(exprArgs) || strong.isNull(call);
        }

        private boolean preserve(RexNode call) {
            if (preserveExprCondition.test(call)) {
                // if the arguments of the expression only reference the
                // left hand side, preserve it on the left; similarly, if
                // it only references expressions on the right
                final ImmutableBitSet exprArgs = RelOptUtil.InputFinder.bits(call);
                if (exprArgs.cardinality() > 0) {
                    if (leftFields.contains(exprArgs) && isStrong(exprArgs, call)) {
                        if (!preserveLeft.contains(call)) {
                            preserveLeft.add(call);
                        }
                        return true;
                    } else if (rightFields.contains(exprArgs) && isStrong(exprArgs, call)) {
                        assert preserveRight != null;
                        if (!preserveRight.contains(call)) {
                            preserveRight.add(call);
                        }
                        return true;
                    }
                }
            // if the expression arguments reference both the left and
            // right, fall through and don't attempt to preserve the
            // expression, but instead locate references and special
            // ops in the call operands
            }
            return false;
        }

        public Void visitInputRef(RexInputRef inputRef) {
            rexRefs.set(inputRef.getIndex());
            return null;
        }
    }

    /**
     * Walks an expression tree, replacing input refs with new values to reflect
     * projection and converting special expressions to field references.
     */
    private class RefAndExprConverter extends RelOptUtil.RexInputConverter {

        private final List<RexNode> preserveLeft;

        private final int firstLeftRef;

        private final List<RexNode> preserveRight;

        private final int firstRightRef;

        RefAndExprConverter(RexBuilder rexBuilder, List<RelDataTypeField> srcFields, List<RelDataTypeField> destFields, int[] adjustments, List<RexNode> preserveLeft, int firstLeftRef, List<RexNode> preserveRight, int firstRightRef) {
            super(rexBuilder, srcFields, destFields, adjustments);
            this.preserveLeft = preserveLeft;
            this.firstLeftRef = firstLeftRef;
            this.preserveRight = preserveRight;
            this.firstRightRef = firstRightRef;
        }

        public RexNode visitCall(RexCall call) {
            // if the expression corresponds to one that needs to be preserved,
            // convert it to a field reference; otherwise, convert the entire
            // expression
            int match = findExprInLists(call, preserveLeft, firstLeftRef, preserveRight, firstRightRef);
            if (match >= 0) {
                return rexBuilder.makeInputRef(destFields.get(match).getType(), match);
            }
            return super.visitCall(call);
        }

        /**
         * Looks for a matching RexNode from among two lists of RexNodes and
         * returns the offset into the list corresponding to the match, adjusted
         * by an amount, depending on whether the match was from the first or
         * second list.
         *
         * @param rex      RexNode that is being matched against
         * @param rexList1 first list of RexNodes
         * @param adjust1  adjustment if match occurred in first list
         * @param rexList2 second list of RexNodes
         * @param adjust2  adjustment if match occurred in the second list
         * @return index in the list corresponding to the matching RexNode; -1
         * if no match
         */
        private int findExprInLists(RexNode rex, List<RexNode> rexList1, int adjust1, List<RexNode> rexList2, int adjust2) {
            int match = rexList1.indexOf(rex);
            if (match >= 0) {
                return match + adjust1;
            }
            if (rexList2 != null) {
                match = rexList2.indexOf(rex);
                if (match >= 0) {
                    return match + adjust2;
                }
            }
            return -1;
        }
    }

    /**
     * A functor that replies true or false for a given expression.
     *
     * @see org.apache.calcite.rel.rules.PushProjector.OperatorExprCondition
     */
    public interface ExprCondition extends Predicate<RexNode> {

        /**
         * Evaluates a condition for a given expression.
         *
         * @param expr Expression
         * @return result of evaluating the condition
         */
        boolean test(RexNode expr);

        /**
         * Constant condition that replies {@code false} for all expressions.
         */
        ExprCondition FALSE = expr -> false;

        /**
         * Constant condition that replies {@code true} for all expressions.
         */
        ExprCondition TRUE = expr -> true;
    }

    /**
     * An expression condition that evaluates to true if the expression is
     * a call to one of a set of operators.
     */
    class OperatorExprCondition implements ExprCondition {

        private final Set<SqlOperator> operatorSet;

        /**
         * Creates an OperatorExprCondition.
         *
         * @param operatorSet Set of operators
         */
        OperatorExprCondition(Iterable<? extends SqlOperator> operatorSet) {
            this.operatorSet = ImmutableSet.copyOf(operatorSet);
        }

        public boolean test(RexNode expr) {
            return expr instanceof RexCall && operatorSet.contains(((RexCall) expr).getOperator());
        }
    }
}
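
A hedged sketch of how a rule typically drives PushProjector: construct it over a Project and the child it should be pushed past, ask it to convert, and transform only when pushing made sense. The project and call variables come from a hypothetical RelOptRuleCall:

// Push "project" past its input, preserving no special expressions.
PushProjector pushProjector = new PushProjector(
    project, null, project.getInput(), PushProjector.ExprCondition.FALSE, call.builder());
RelNode pushed = pushProjector.convertProject(null);
if (pushed != null) {
    call.transformTo(pushed);
}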

15 Source : CalcRelSplitter.java
with Apache License 2.0
from lealone

/**
 * CalcRelSplitter operates on a
 * {@link org.apache.calcite.rel.core.Calc} with multiple {@link RexCall}
 * sub-expressions that cannot all be implemented by a single concrete
 * {@link RelNode}.
 *
 * <p>For example, the Java and Fennel calculators do not implement an identical
 * set of operators. The CalcRelSplitter can be used to split a single Calc with
 * mixed Java- and Fennel-only operators into a tree of Calc objects that can
 * each be individually implemented by either Java or Fennel, splitting the
 * original Calc into several Calc instances.
 *
 * <p>Currently the splitter is only capable of handling two "rel types". That
 * is, it can deal with Java vs. Fennel Calcs, but not Java vs. Fennel vs.
 * some other type of Calc.
 *
 * <p>See {@link ProjectToWindowRule}
 * for an example of how this class is used.
 */
public abstract class CalcRelSplitter {

    // ~ Static fields/initializers ---------------------------------------------
    private static final Logger RULE_LOGGER = RelOptPlanner.LOGGER;

    // ~ Instance fields --------------------------------------------------------
    protected final RexProgram program;

    private final RelDataTypeFactory typeFactory;

    private final RelType[] relTypes;

    private final RelOptCluster cluster;

    private final RelTraitSet traits;

    private final RelNode child;

    protected final RelBuilder relBuilder;

    // ~ Constructors -----------------------------------------------------------
    /**
     * Constructs a CalcRelSplitter.
     *
     * @param calc     Calc to split
     * @param relTypes Array of rel types, e.g. {Java, Fennel}. Must be
     *                 distinct.
     */
    CalcRelSplitter(Calc calc, RelBuilder relBuilder, RelType[] relTypes) {
        this.relBuilder = relBuilder;
        for (int i = 0; i < relTypes.length; i++) {
            assert relTypes[i] != null;
            for (int j = 0; j < i; j++) {
                assert relTypes[i] != relTypes[j] : "Rel types must be distinct";
            }
        }
        this.program = calc.getProgram();
        this.cluster = calc.getCluster();
        this.traits = calc.getTraitSet();
        this.typeFactory = calc.getCluster().getTypeFactory();
        this.child = calc.getInput();
        this.relTypes = relTypes;
    }

    // ~ Methods ----------------------------------------------------------------
    RelNode execute() {
        // Check that program is valid. In particular, this means that every
        // expression is trivial (either an atom, or a function applied to
        // references to atoms) and every expression depends only on
        // expressions to the left.
        assert program.isValid(Litmus.THROW, null);
        final List<RexNode> exprList = program.getExprList();
        final RexNode[] exprs = exprList.toArray(new RexNode[0]);
        assert !RexUtil.containComplexExprs(exprList);
        // Figure out what level each expression belongs to.
        int[] exprLevels = new int[exprs.length];
        // The type of a level is given by
        // relTypes[levelTypeOrdinals[level]].
        int[] levelTypeOrdinals = new int[exprs.length];
        int levelCount = chooseLevels(exprs, -1, exprLevels, levelTypeOrdinals);
        // For each expression, figure out which is the highest level where it
        // is used.
        int[] exprMaxUsingLevelOrdinals = new HighestUsageFinder(exprs, exprLevels).getMaxUsingLevelOrdinals();
        // If expressions are used as outputs, mark them as higher than that.
        final List<RexLocalRef> projectRefList = program.getProjectList();
        final RexLocalRef conditionRef = program.getCondition();
        for (RexLocalRef projectRef : projectRefList) {
            exprMaxUsingLevelOrdinals[projectRef.getIndex()] = levelCount;
        }
        if (conditionRef != null) {
            exprMaxUsingLevelOrdinals[conditionRef.getIndex()] = levelCount;
        }
        // Print out what we've got.
        if (RULE_LOGGER.isTraceEnabled()) {
            traceLevelExpressions(exprs, exprLevels, levelTypeOrdinals, levelCount);
        }
        // Now build the calcs.
        RelNode rel = child;
        final int inputFieldCount = program.getInputRowType().getFieldCount();
        int[] inputExprOrdinals = idenreplacedyArray(inputFieldCount);
        boolean doneCondition = false;
        for (int level = 0; level < levelCount; level++) {
            final int[] projectExprOrdinals;
            final RelDataType outputRowType;
            if (level == (levelCount - 1)) {
                outputRowType = program.getOutputRowType();
                projectExprOrdinals = new int[projectRefList.size()];
                for (int i = 0; i < projectExprOrdinals.length; i++) {
                    projectExprOrdinals[i] = projectRefList.get(i).getIndex();
                }
            } else {
                outputRowType = null;
                // Project the expressions which are computed at this level or
                // before, and will be used at later levels.
                List<Integer> projectExprOrdinalList = new ArrayList<>();
                for (int i = 0; i < exprs.length; i++) {
                    RexNode expr = exprs[i];
                    if (expr instanceof RexLiteral) {
                        // Don't project literals. They are always created in
                        // the level where they are used.
                        exprLevels[i] = -1;
                        continue;
                    }
                    if ((exprLevels[i] <= level) && (exprMaxUsingLevelOrdinals[i] > level)) {
                        projectExprOrdinalList.add(i);
                    }
                }
                projectExprOrdinals = Ints.toArray(projectExprOrdinalList);
            }
            final RelType relType = relTypes[levelTypeOrdinals[level]];
            // Can we do the condition this level?
            int conditionExprOrdinal = -1;
            if ((conditionRef != null) && !doneCondition) {
                conditionExprOrdinal = conditionRef.getIndex();
                if ((exprLevels[conditionExprOrdinal] > level) || !relType.supportsCondition()) {
                    // stand down -- we're not ready to do the condition yet
                    conditionExprOrdinal = -1;
                } else {
                    doneCondition = true;
                }
            }
            RexProgram program1 = createProgramForLevel(level, levelCount, rel.getRowType(), exprs, exprLevels, inputExprOrdinals, projectExprOrdinals, conditionExprOrdinal, outputRowType);
            rel = relType.makeRel(cluster, traits, relBuilder, rel, program1);
            // Sometimes a level's program merely projects its inputs. We don't
            // want these. They cause an explosion in the search space.
            if (rel instanceof LogicalCalc && ((LogicalCalc) rel).getProgram().isTrivial()) {
                rel = rel.getInput(0);
            }
            rel = handle(rel);
            // The outputs of this level will be the inputs to the next level.
            inputExprOrdinals = projectExprOrdinals;
        }
        Preconditions.checkArgument(doneCondition || (conditionRef == null), "unhandled condition");
        return rel;
    }

    /**
     * Opportunity to further refine the relational expression created for a
     * given level. The default implementation returns the relational expression
     * unchanged.
     */
    protected RelNode handle(RelNode rel) {
        return rel;
    }

    /**
     * Figures out which expressions to calculate at which level.
     *
     * @param exprs             Array of expressions
     * @param conditionOrdinal  Ordinal of the condition expression, or -1 if no
     *                          condition
     * @param exprLevels        Level ordinal for each expression (output)
     * @param levelTypeOrdinals The type of each level (output)
     * @return Number of levels required
     */
    private int chooseLevels(final RexNode[] exprs, int conditionOrdinal, int[] exprLevels, int[] levelTypeOrdinals) {
        final int inputFieldCount = program.getInputRowType().getFieldCount();
        int levelCount = 0;
        final MaxInputFinder maxInputFinder = new MaxInputFinder(exprLevels);
        boolean[] relTypesPossibleForTopLevel = new boolean[relTypes.length];
        Arrays.fill(relTypesPossibleForTopLevel, true);
        // Compute the order in which to visit expressions.
        final List<Set<Integer>> cohorts = getCohorts();
        final List<Integer> permutation = computeTopologicalOrdering(exprs, cohorts);
        for (int i : permutation) {
            RexNode expr = exprs[i];
            final boolean condition = i == conditionOrdinal;
            if (i < inputFieldCount) {
                assert expr instanceof RexInputRef;
                exprLevels[i] = -1;
                continue;
            }
            // Deduce the minimum level of the expression. An expression must
            // be at a level greater than or equal to all of its inputs.
            int level = maxInputFinder.maxInputFor(expr);
            // If the expression is in a cohort, it can occur no lower than the
            // levels of other expressions in the same cohort.
            Set<Integer> cohort = findCohort(cohorts, i);
            if (cohort != null) {
                for (Integer exprOrdinal : cohort) {
                    if (exprOrdinal == i) {
                        // Already did this member of the cohort. It's a waste
                        // of effort to repeat.
                        continue;
                    }
                    final RexNode cohortExpr = exprs[exprOrdinal];
                    int cohortLevel = maxInputFinder.maxInputFor(cohortExpr);
                    if (cohortLevel > level) {
                        level = cohortLevel;
                    }
                }
            }
            // Try to implement this expression at this level.
            // If that is not possible, try to implement it at higher levels.
            levelLoop: for (; ; ++level) {
                if (level >= levelCount) {
                    // This is a new level. We can use any type we like.
                    for (int relTypeOrdinal = 0; relTypeOrdinal < relTypes.length; relTypeOrdinal++) {
                        if (!relTypesPossibleForTopLevel[relTypeOrdinal]) {
                            continue;
                        }
                        if (relTypes[relTypeOrdinal].canImplement(expr, condition)) {
                            // Success. We have found a type where we can
                            // implement this expression.
                            exprLevels[i] = level;
                            levelTypeOrdinals[level] = relTypeOrdinal;
                            assert (level == 0) || (levelTypeOrdinals[level - 1] != levelTypeOrdinals[level]) : "successive levels of same type";
                            // Figure out which of the other reltypes are
                            // still possible for this level.
                            // Previous reltypes are not possible.
                            for (int j = 0; j < relTypeOrdinal; ++j) {
                                relTypesPossibleForTopLevel[j] = false;
                            }
                            // Successive reltypes may be possible.
                            for (int j = relTypeOrdinal + 1; j < relTypes.length; ++j) {
                                if (relTypesPossibleForTopLevel[j]) {
                                    relTypesPossibleForTopLevel[j] = relTypes[j].canImplement(expr, condition);
                                }
                            }
                            // Move to next level.
                            levelTypeOrdinals[levelCount] = firstSet(relTypesPossibleForTopLevel);
                            ++levelCount;
                            Arrays.fill(relTypesPossibleForTopLevel, true);
                            break levelLoop;
                        }
                    }
                    // None of the reltypes still active for this level could
                    // implement expr. But maybe we could succeed with a new
                    // level, with all options open?
                    if (count(relTypesPossibleForTopLevel) >= relTypes.length) {
                        // Cannot implement for any type.
                        throw new replacedertionError("cannot implement " + expr);
                    }
                    levelTypeOrdinals[levelCount] = firstSet(relTypesPossibleForTopLevel);
                    ++levelCount;
                    Arrays.fill(relTypesPossibleForTopLevel, true);
                } else {
                    final int levelTypeOrdinal = levelTypeOrdinals[level];
                    if (!relTypes[levelTypeOrdinal].canImplement(expr, condition)) {
                        // Cannot implement this expression in this type;
                        // continue to next level.
                        continue;
                    }
                    exprLevels[i] = level;
                    break;
                }
            }
        }
        if (levelCount > 0) {
            // The last level must be of CalcRelType, otherwise literals cannot
            // be implemented.
            assert "CalcRelType".equals(relTypes[0].name) : "The first RelType should be CalcRelType for proper RexLiteral" + " implementation at the last level, got " + relTypes[0].name;
            if (levelTypeOrdinals[levelCount - 1] != 0) {
                levelCount++;
            }
        }
        return levelCount;
    }

    /**
     * Computes the order in which to visit expressions, so that we decide the
     * level of an expression only after the levels of lower expressions have
     * been decided.
     *
     * <p>First, we need to ensure that an expression is visited after all of
     * its inputs.
     *
     * <p>Further, if the expression is a member of a cohort, we need to visit
     * it after the inputs of all other expressions in that cohort. With this
     * condition, expressions in the same cohort will very likely end up in the
     * same level.
     *
     * <p>Note that if there are no cohorts, the expressions from the
     * {@link RexProgram} are already in a suitable order. We perform the
     * topological sort just to ensure that the code path is well-trodden.
     *
     * @param exprs   Expressions
     * @param cohorts List of cohorts, each of which is a set of expr ordinals
     * @return Expression ordinals in topological order
     */
    private List<Integer> computeTopologicalOrdering(RexNode[] exprs, List<Set<Integer>> cohorts) {
        final DirectedGraph<Integer, DefaultEdge> graph = DefaultDirectedGraph.create();
        for (int i = 0; i < exprs.length; i++) {
            graph.addVertex(i);
        }
        for (int i = 0; i < exprs.length; i++) {
            final RexNode expr = exprs[i];
            final Set<Integer> cohort = findCohort(cohorts, i);
            final Set<Integer> targets;
            if (cohort == null) {
                targets = Collections.singleton(i);
            } else {
                targets = cohort;
            }
            expr.accept(new RexVisitorImpl<Void>(true) {

                @Override
                public Void visitLocalRef(RexLocalRef localRef) {
                    for (Integer target : targets) {
                        graph.addEdge(localRef.getIndex(), target);
                    }
                    return null;
                }
            });
        }
        TopologicalOrderIterator<Integer, DefaultEdge> iter = new TopologicalOrderIterator<>(graph);
        final List<Integer> permutation = new ArrayList<>();
        while (iter.hasNext()) {
            permutation.add(iter.next());
        }
        return permutation;
    }

    /**
     * Finds the cohort that contains the given integer, or returns null.
     *
     * @param cohorts List of cohorts, each a set of integers
     * @param ordinal Integer to search for
     * @return Cohort that contains the integer, or null if not found
     */
    private static Set<Integer> findCohort(List<Set<Integer>> cohorts, int ordinal) {
        for (Set<Integer> cohort : cohorts) {
            if (cohort.contains(ordinal)) {
                return cohort;
            }
        }
        return null;
    }

    private int[] identityArray(int length) {
        final int[] ints = new int[length];
        for (int i = 0; i < ints.length; i++) {
            ints[i] = i;
        }
        return ints;
    }

    /**
     * Creates a program containing the expressions for a given level.
     *
     * <p>The expression list of the program will consist of all entries in the
     * expression list <code>allExprs[i]</code> for which the corresponding
     * level ordinal <code>exprLevels[i]</code> is equal to <code>level</code>.
     * Expressions are mapped according to <code>inputExprOrdinals</code>.
     *
     * @param level                Level ordinal
     * @param levelCount           Number of levels
     * @param inputRowType         Input row type
     * @param allExprs             Array of all expressions
     * @param exprLevels           Array of the level ordinal of each expression
     * @param inputExprOrdinals    Ordinals in the expression list of input
     *                             expressions. Input expression <code>i</code>
     *                             will be found at position
     *                             <code>inputExprOrdinals[i]</code>.
     * @param projectExprOrdinals  Ordinals of the expressions to be output this
     *                             level.
     * @param conditionExprOrdinal Ordinal of the expression to form the
     *                             condition for this level, or -1 if there is no
     *                             condition.
     * @param outputRowType        Output row type
     * @return Relational expression
     */
    private RexProgram createProgramForLevel(int level, int levelCount, RelDataType inputRowType, RexNode[] allExprs, int[] exprLevels, int[] inputExprOrdinals, final int[] projectExprOrdinals, int conditionExprOrdinal, RelDataType outputRowType) {
        // Build a list of expressions to form the calc.
        List<RexNode> exprs = new ArrayList<>();
        // exprInverseOrdinals describes where an expression in allExprs comes
        // from -- from an input, from a calculated expression, or -1 if not
        // available at this level.
        int[] exprInverseOrdinals = new int[allExprs.length];
        Arrays.fill(exprInverseOrdinals, -1);
        int j = 0;
        // First populate the inputs. They were computed at some previous level
        // and are used here.
        for (int i = 0; i < inputExprOrdinals.length; i++) {
            final int inputExprOrdinal = inputExprOrdinals[i];
            exprs.add(RexBuilder.getRexFactory().makeInputRef(i, allExprs[inputExprOrdinal].getType()));
            exprInverseOrdinals[inputExprOrdinal] = j;
            ++j;
        }
        // Next populate the computed expressions.
        final RexShuttle shuttle = new InputToCommonExprConverter(exprInverseOrdinals, exprLevels, level, inputExprOrdinals, allExprs);
        for (int i = 0; i < allExprs.length; i++) {
            if (exprLevels[i] == level || exprLevels[i] == -1 && level == (levelCount - 1) && allExprs[i] instanceof RexLiteral) {
                RexNode expr = allExprs[i];
                final RexNode translatedExpr = expr.accept(shuttle);
                exprs.add(translatedExpr);
                assert exprInverseOrdinals[i] == -1;
                exprInverseOrdinals[i] = j;
                ++j;
            }
        }
        // Form the projection and condition list. Project and condition
        // ordinals are offsets into allExprs, so we need to map them into
        // exprs.
        final List<RexLocalRef> projectRefs = new ArrayList<>(projectExprOrdinals.length);
        final List<String> fieldNames = new ArrayList<>(projectExprOrdinals.length);
        for (int i = 0; i < projectExprOrdinals.length; i++) {
            final int projectExprOrdinal = projectExprOrdinals[i];
            final int index = exprInverseOrdinals[projectExprOrdinal];
            assert index >= 0;
            RexNode expr = allExprs[projectExprOrdinal];
            projectRefs.add(RexBuilder.getRexFactory().makeLocalRef(index, expr.getType()));
            // Inherit meaningful field name if possible.
            fieldNames.add(deriveFieldName(expr, i));
        }
        RexLocalRef conditionRef;
        if (conditionExprOrdinal >= 0) {
            final int index = exprInverseOrdinals[conditionExprOrdinal];
            conditionRef = RexBuilder.getRexFactory().makeLocalRef(index, allExprs[conditionExprOrdinal].getType());
        } else {
            conditionRef = null;
        }
        if (outputRowType == null) {
            outputRowType = RexUtil.createStructType(typeFactory, projectRefs, fieldNames, null);
        }
        final RexProgram program = new RexProgram(inputRowType, exprs, projectRefs, conditionRef, outputRowType);
        // Program is NOT normalized here (e.g. can contain literals in
        // call operands), since literals should be inlined.
        return program;
    }

    private String deriveFieldName(RexNode expr, int ordinal) {
        if (expr instanceof RexInputRef) {
            int inputIndex = ((RexInputRef) expr).getIndex();
            String fieldName = child.getRowType().getFieldList().get(inputIndex).getName();
            // Don't inherit field names like '$3' from child: that's
            // confusing.
            if (!fieldName.startsWith("$") || fieldName.startsWith("$EXPR")) {
                return fieldName;
            }
        }
        return "$" + ordinal;
    }

    /**
     * Traces the given array of level expression lists, at trace logging level.
     *
     * @param exprs             Array expressions
     * @param exprLevels        For each expression, the ordinal of its level
     * @param levelTypeOrdinals For each level, the ordinal of its type in
     *                          the {@link #relTypes} array
     * @param levelCount        The number of levels
     */
    private void traceLevelExpressions(RexNode[] exprs, int[] exprLevels, int[] levelTypeOrdinals, int levelCount) {
        StringWriter traceMsg = new StringWriter();
        PrintWriter traceWriter = new PrintWriter(traceMsg);
        traceWriter.println("FarragoAutoCalcRule result expressions for: ");
        traceWriter.println(program.toString());
        for (int level = 0; level < levelCount; level++) {
            traceWriter.println("Rel Level " + level + ", type " + relTypes[levelTypeOrdinals[level]]);
            for (int i = 0; i < exprs.length; i++) {
                RexNode expr = exprs[i];
                assert (exprLevels[i] >= -1) && (exprLevels[i] < levelCount) : "expression's level is out of range";
                if (exprLevels[i] == level) {
                    traceWriter.println("\t" + i + ": " + expr);
                }
            }
            traceWriter.println();
        }
        String msg = traceMsg.toString();
        RULE_LOGGER.trace(msg);
    }

    /**
     * Returns the number of bits set in an array.
     */
    private static int count(boolean[] booleans) {
        int count = 0;
        for (boolean b : booleans) {
            if (b) {
                ++count;
            }
        }
        return count;
    }

    /**
     * Returns the index of the first set bit in an array.
     */
    private static int firstSet(boolean[] booleans) {
        for (int i = 0; i < booleans.length; i++) {
            if (booleans[i]) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Searches for a value in a map, and returns the position where it was
     * found, or -1.
     *
     * @param value Value to search for
     * @param map   Map to search in
     * @return Ordinal of value in map, or -1 if not found
     */
    private static int indexOf(int value, int[] map) {
        for (int i = 0; i < map.length; i++) {
            if (value == map[i]) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Returns whether a relational expression can be implemented solely in a
     * given {@link RelType}.
     *
     * @param rel         Calculation relational expression
     * @param relTypeName Name of a {@link RelType}
     * @return Whether relational expression can be implemented
     */
    protected boolean canImplement(LogicalCalc rel, String relTypeName) {
        for (RelType relType : relTypes) {
            if (relType.name.equals(relTypeName)) {
                return relType.canImplement(rel.getProgram());
            }
        }
        throw new replacedertionError("unknown type " + relTypeName);
    }

    /**
     * Returns a list of sets of expressions that should be on the same level.
     *
     * <p>For example, if this method returns { {3, 5}, {4, 7} }, it means that
     * expressions 3 and 5 should be on the same level, and expressions 4 and 7
     * should be on the same level. The two cohorts do not need to be on the
     * same level.
     *
     * <p>The list is best effort. If it is not possible to arrange that the
     * expressions in a cohort are on the same level, the {@link #execute()}
     * method will still succeed.
     *
     * <p>The default implementation of this method returns the empty list;
     * expressions will be put on the most suitable level. This is generally
     * the lowest possible level, except for literals, which are placed at the
     * level where they are used.
     *
     * @return List of cohorts (sets of expressions) that the splitting
     * algorithm should attempt to place on the same level
     */
    protected List<Set<Integer>> getCohorts() {
        return Collections.emptyList();
    }

    // ~ Inner Classes ----------------------------------------------------------
    /**
     * Type of relational expression. Determines which kinds of
     * expressions it can handle.
     */
    public abstract static class RelType {

        private final String name;

        public RelType(String name) {
            this.name = name;
        }

        @Override
        public String toString() {
            return name;
        }

        protected abstract boolean canImplement(RexFieldAccess field);

        protected abstract boolean canImplement(RexDynamicParam param);

        protected abstract boolean canImplement(RexLiteral literal);

        protected abstract boolean canImplement(RexCall call);

        protected boolean supportsCondition() {
            return true;
        }

        protected RelNode makeRel(RelOptCluster cluster, RelTraitSet traitSet, RelBuilder relBuilder, RelNode input, RexProgram program) {
            return LogicalCalc.create(input, program);
        }

        /**
         * Returns whether this <code>RelType</code> can implement a given
         * expression.
         *
         * @param expr      Expression
         * @param condition Whether expression is a condition
         * @return Whether this <code>RelType</code> can implement a given
         * expression.
         */
        public boolean canImplement(RexNode expr, boolean condition) {
            if (condition && !supportsCondition()) {
                return false;
            }
            try {
                expr.accept(new ImplementTester(this));
                return true;
            } catch (CannotImplement e) {
                Util.swallow(e, null);
                return false;
            }
        }

        /**
         * Returns whether this tester's <code>RelType</code> can implement a
         * given program.
         *
         * @param program Program
         * @return Whether this tester's <code>RelType</code> can implement a
         * given program.
         */
        public boolean canImplement(RexProgram program) {
            if ((program.getCondition() != null) && !canImplement(program.getCondition(), true)) {
                return false;
            }
            for (RexNode expr : program.getExprList()) {
                if (!canImplement(expr, false)) {
                    return false;
                }
            }
            return true;
        }
    }

    /**
     * Visitor which returns whether an expression can be implemented in a given
     * type of relational expression.
     */
    private static class ImplementTester extends RexVisitorImpl<Void> {

        private final RelType relType;

        ImplementTester(RelType relType) {
            super(false);
            this.relType = relType;
        }

        @Override
        public Void visitCall(RexCall call) {
            if (!relType.canImplement(call)) {
                throw CannotImplement.INSTANCE;
            }
            return null;
        }

        @Override
        public Void visitDynamicParam(RexDynamicParam dynamicParam) {
            if (!relType.canImplement(dynamicParam)) {
                throw CannotImplement.INSTANCE;
            }
            return null;
        }

        @Override
        public Void visitFieldAccess(RexFieldAccess fieldAccess) {
            if (!relType.canImplement(fieldAccess)) {
                throw CannotImplement.INSTANCE;
            }
            return null;
        }

        @Override
        public Void visitLiteral(RexLiteral literal) {
            if (!relType.canImplement(literal)) {
                throw CannotImplement.INSTANCE;
            }
            return null;
        }
    }

    /**
     * Control exception for {@link ImplementTester}.
     */
    private static class CannotImplement extends RuntimeException {

        @SuppressWarnings("ThrowableInstanceNeverThrown")
        static final CannotImplement INSTANCE = new CannotImplement();
    }

    /**
     * Shuttle which converts every reference to an input field in an expression
     * to a reference to a common sub-expression.
     */
    private static class InputToCommonExprConverter extends RexShuttle {

        private final int[] exprInverseOrdinals;

        private final int[] exprLevels;

        private final int level;

        private final int[] inputExprOrdinals;

        private final RexNode[] allExprs;

        InputToCommonExprConverter(int[] exprInverseOrdinals, int[] exprLevels, int level, int[] inputExprOrdinals, RexNode[] allExprs) {
            this.exprInverseOrdinals = exprInverseOrdinals;
            this.exprLevels = exprLevels;
            this.level = level;
            this.inputExprOrdinals = inputExprOrdinals;
            this.allExprs = allExprs;
        }

        @Override
        public RexNode visitInputRef(RexInputRef input) {
            final int index = exprInverseOrdinals[input.getIndex()];
            assert index >= 0;
            return RexBuilder.getRexFactory().makeLocalRef(index, input.getType());
        }

        @Override
        public RexNode visitLocalRef(RexLocalRef local) {
            // A reference to a local variable becomes a reference to an input
            // if the local was computed at a previous level.
            final int localIndex = local.getIndex();
            final int exprLevel = exprLevels[localIndex];
            if (exprLevel < level) {
                if (allExprs[localIndex] instanceof RexLiteral) {
                    // Expression is to be inlined. Use the original expression.
                    return allExprs[localIndex];
                }
                int inputIndex = indexOf(localIndex, inputExprOrdinals);
                assert inputIndex >= 0;
                return RexBuilder.getRexFactory().makeLocalRef(inputIndex, local.getType());
            } else {
                // It's a reference to what was a local expression at the
                // previous level, and was then projected.
                final int exprIndex = exprInverseOrdinals[localIndex];
                return RexBuilder.getRexFactory().makeLocalRef(exprIndex, local.getType());
            }
        }
    }

    /**
     * Finds the highest level used by any of the inputs of a given expression.
     */
    private static class MaxInputFinder extends RexVisitorImpl<Void> {

        int level;

        private final int[] exprLevels;

        MaxInputFinder(int[] exprLevels) {
            super(true);
            this.exprLevels = exprLevels;
        }

        @Override
        public Void visitLocalRef(RexLocalRef localRef) {
            int inputLevel = exprLevels[localRef.getIndex()];
            level = Math.max(level, inputLevel);
            return null;
        }

        /**
         * Returns the highest level of any of the inputs of an expression.
         */
        public int maxInputFor(RexNode expr) {
            level = 0;
            expr.accept(this);
            return level;
        }
    }

    /**
     * Builds an array of the highest level which contains an expression which
     * uses each expression as an input.
     */
    private static class HighestUsageFinder extends RexVisitorImpl<Void> {

        private final int[] maxUsingLevelOrdinals;

        private int currentLevel;

        HighestUsageFinder(RexNode[] exprs, int[] exprLevels) {
            super(true);
            this.maxUsingLevelOrdinals = new int[exprs.length];
            Arrays.fill(maxUsingLevelOrdinals, -1);
            for (int i = 0; i < exprs.length; i++) {
                if (exprs[i] instanceof RexLiteral) {
                    // Literals are always used directly. It never makes sense
                    // to compute them at a lower level and project them to
                    // where they are used.
                    maxUsingLevelOrdinals[i] = -1;
                    continue;
                }
                currentLevel = exprLevels[i];
                exprs[i].accept(this);
            }
        }

        public int[] getMaxUsingLevelOrdinals() {
            return maxUsingLevelOrdinals;
        }

        @Override
        public Void visitLocalRef(RexLocalRef ref) {
            final int index = ref.getIndex();
            maxUsingLevelOrdinals[index] = Math.max(maxUsingLevelOrdinals[index], currentLevel);
            return null;
        }
    }
}
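
The abstract RelType above is the extension point of CalcRelSplitter: a concrete subclass answers, per kind of RexNode, whether its target convention can evaluate that expression. Below is a minimal sketch of such a subclass, assuming the CalcRelSplitter class above is accessible from the calling code; the name PermissiveRelType and the blanket "return true" answers are illustrative only, not part of any Calcite API.

import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexDynamicParam;
import org.apache.calcite.rex.RexFieldAccess;
import org.apache.calcite.rex.RexLiteral;

/** Sketch of a RelType that claims it can implement every expression. */
class PermissiveRelType extends CalcRelSplitter.RelType {

    PermissiveRelType() {
        // chooseLevels() asserts that the first RelType is named "CalcRelType".
        super("CalcRelType");
    }

    @Override
    protected boolean canImplement(RexFieldAccess field) {
        return true;
    }

    @Override
    protected boolean canImplement(RexDynamicParam param) {
        return true;
    }

    @Override
    protected boolean canImplement(RexLiteral literal) {
        return true;
    }

    @Override
    protected boolean canImplement(RexCall call) {
        return true;
    }
}

With such a type, the default makeRel implementation produces a LogicalCalc per level, so a splitter built with this single RelType essentially reconstructs the original Calc.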

15 Source : AggregateUnionTransposeRule.java
with Apache License 2.0
from lealone

public void onMatch(RelOptRuleCall call) {
    Aggregate aggRel = call.rel(0);
    Union union = call.rel(1);
    if (!union.all) {
        // This transformation is only valid for UNION ALL.
        // Consider t1(i) with rows (5), (5) and t2(i) with
        // rows (5), (10), and the query
        // select sum(i) from (select i from t1) union (select i from t2).
        // The correct answer is 15.  If we apply the transformation,
        // we get
        // select sum(i) from
        // (select sum(i) as i from t1) union (select sum(i) as i from t2)
        // which yields 25 (incorrect).
        return;
    }
    int groupCount = aggRel.getGroupSet().cardinality();
    List<AggregateCall> transformedAggCalls = transformAggCalls(aggRel.copy(aggRel.getTraitSet(), aggRel.getInput(), false, aggRel.getGroupSet(), null, aggRel.getAggCallList()), groupCount, aggRel.getAggCallList());
    if (transformedAggCalls == null) {
        // we've detected the presence of something like AVG,
        // which we can't handle
        return;
    }
    // create corresponding aggregates on top of each union child
    final RelBuilder relBuilder = call.builder();
    int transformCount = 0;
    final RelMetadataQuery mq = call.getMetadataQuery();
    for (RelNode input : union.getInputs()) {
        boolean alreadyUnique = RelMdUtil.areColumnsDefinitelyUnique(mq, input, aggRel.getGroupSet());
        relBuilder.push(input);
        if (!alreadyUnique) {
            ++transformCount;
            relBuilder.aggregate(relBuilder.groupKey(aggRel.getGroupSet()), aggRel.getAggCallList());
        }
    }
    if (transformCount == 0) {
        // none of the children could benefit from the push-down,
        // so bail out (preventing the infinite loop to which most
        // planners would succumb)
        return;
    }
    // create a new union whose children are the aggregates created above
    relBuilder.union(true, union.getInputs().size());
    relBuilder.aggregate(relBuilder.groupKey(aggRel.getGroupSet(), aggRel.getGroupSets()), transformedAggCalls);
    call.transformTo(relBuilder.build());
}
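
For reference, the plan shape this rule produces can be built directly with RelBuilder. A minimal sketch, assuming a FrameworkConfig named config whose default schema exposes tables T1 and T2 with a column I (all names illustrative):

// 'config' is an assumed FrameworkConfig (see lead-in)
final RelBuilder b = RelBuilder.create(config);
final RelNode plan = b
    .scan("T1")
    .aggregate(b.groupKey("I"), b.count(false, "C"))
    .scan("T2")
    .aggregate(b.groupKey("I"), b.count(false, "C"))
    // UNION ALL of the two pre-aggregated inputs
    .union(true)
    // re-aggregate on top, combining the partial counts
    .aggregate(b.groupKey("I"), b.sum(false, "C", b.field("C")))
    .build();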

15 Source : JoinFilterCanonicalizationRule.java
with Apache License 2.0
from dremio

@Override
public void onMatch(RelOptRuleCall call) {
    final Join join = call.rel(0);
    final RelNode left = join.getLeft();
    final RelNode right = join.getRight();
    RelBuilder builder = factory.create(join.getCluster(), null);
    RelNode newJoin = canonicalizeJoinCondition(builder, join.getJoinType(), join.getCondition(), left, right);
    if (newJoin != null) {
        call.transformTo(newJoin);
    }
}

15 Source : JoinFilterCanonicalizationRule.java
with Apache License 2.0
from dremio

/**
 * Create a join operator with a canonicalized version of {@code joinCondition}
 *
 * @param builder
 * @param joinType
 * @param joinCondition
 * @param left
 * @param right
 * @return the new join operator, or {@code null} if {@code joinCondition} hasn't changed.
 */
private RelNode canonicalizeJoinCondition(RelBuilder builder, JoinRelType joinType, RexNode joinCondition, RelNode left, RelNode right) {
    final List<Integer> leftKeys = Lists.newArrayList();
    final List<Integer> rightKeys = Lists.newArrayList();
    final List<Boolean> filterNulls = Lists.newArrayList();
    final RexNode remaining = RelOptUtil.splitJoinCondition(left, right, joinCondition, leftKeys, rightKeys, filterNulls);
    // Create a normalized join condition
    final RexNode newPartialJoinCondition = buildJoinCondition(builder.getRexBuilder(), left.getRowType(), right.getRowType(), leftKeys, rightKeys, filterNulls);
    // Add the remaining filter condition
    final RexNode newJoinCondition = RelOptUtil.andJoinFilters(builder.getRexBuilder(), newPartialJoinCondition, remaining);
    // Bail out if the condition is unchanged from the original
    if (RexUtil.eq(joinCondition, newJoinCondition)) {
        return null;
    }
    builder.pushAll(ImmutableList.of(left, right));
    builder.join(joinType, newJoinCondition);
    return builder.build();
}
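
builder.pushAll(...) followed by builder.join(...) above is the standard RelBuilder join idiom. A minimal sketch of the same idiom in isolation, assuming a FrameworkConfig named config whose schema contains EMP and DEPT tables sharing a DEPTNO column (illustrative names):

// 'config' is an assumed FrameworkConfig (see lead-in)
final RelBuilder b = RelBuilder.create(config);
final RelNode join = b
    .scan("EMP")
    .scan("DEPT")
    .join(JoinRelType.INNER,
        b.equals(
            // field(inputCount, inputOrdinal, fieldName): ordinal 0 = left, 1 = right
            b.field(2, 0, "DEPTNO"),
            b.field(2, 1, "DEPTNO")))
    .build();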

14 Source : HiveToRelConverterTest.java
with BSD 2-Clause "Simplified" License
from linkedin

@Test
public void testBasic() {
    String sql = "SELECT * from foo";
    RelNode rel = converter.convertSql(sql);
    RelBuilder relBuilder = createRelBuilder();
    RelNode expected = relBuilder.scan(ImmutableList.of("hive", "default", "foo")).project(ImmutableList.of(relBuilder.field("a"), relBuilder.field("b"), relBuilder.field("c")), ImmutableList.of(), true).build();
    verifyRel(rel, expected);
}
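
The createRelBuilder() helper used by the test is not shown. A RelBuilder for this kind of plan comparison is typically obtained from a FrameworkConfig; a minimal sketch, assuming rootSchema is a SchemaPlus that already contains the "hive" sub-schema referenced by the scan (the variable names are illustrative):

// 'rootSchema' is an assumed, pre-populated SchemaPlus
FrameworkConfig config = Frameworks.newConfigBuilder()
    .defaultSchema(rootSchema)
    .build();
RelBuilder relBuilder = RelBuilder.create(config);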

14 Source : SubQueryRemoveRule.java
with Apache License 2.0
from lealone

/**
 * Rewrites an EXISTS RexSubQuery into a {@link Join}.
 *
 * @param e            EXISTS sub-query to rewrite
 * @param variablesSet A set of variables used by a relational
 *                     expression of the specified RexSubQuery
 * @param logic        Logic for evaluating
 * @param builder      Builder
 *
 * @return Expression that may be used to replace the RexSubQuery
 */
private RexNode rewriteExists(RexSubQuery e, Set<CorrelationId> variablesSet, RelOptUtil.Logic logic, RelBuilder builder) {
    builder.push(e.getRel());
    builder.project(builder.alias(builder.literal(true), "i"));
    switch(logic) {
        case TRUE:
            // Handles queries with single EXISTS in filter condition:
            // select e.deptno from emp as e
            // where exists (select deptno from emp)
            builder.aggregate(builder.groupKey(0));
            builder.as("dt");
            builder.join(JoinRelType.INNER, builder.literal(true), variablesSet);
            return builder.literal(true);
        default:
            builder.distinct();
    }
    builder.as("dt");
    builder.join(JoinRelType.LEFT, builder.literal(true), variablesSet);
    return builder.isNotNull(Util.last(builder.fields()));
}
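
Isolated from the rule, the default branch above corresponds to the following stand-alone plan. A minimal sketch, assuming a FrameworkConfig named config whose schema contains EMP and DEPT tables (illustrative names); the DEPT scan stands in for the sub-query relation:

// 'config' is an assumed FrameworkConfig (see lead-in)
final RelBuilder b = RelBuilder.create(config);
b.scan("EMP")
    .scan("DEPT")
    // reduce the sub-query to a single, always-true marker column
    .project(b.alias(b.literal(true), "i"))
    .distinct()
    .as("dt")
    // left join back against the outer relation on a trivially true condition
    .join(JoinRelType.LEFT, b.literal(true))
    // the marker is non-null when the (uncorrelated) sub-query returned at least one row
    .filter(b.isNotNull(b.field("i")));
final RelNode rewritten = b.build();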

14 Source : ProjectMultiJoinMergeRule.java
with Apache License 2.0
from lealone

// ~ Methods ----------------------------------------------------------------
public void onMatch(RelOptRuleCall call) {
    LogicalProject project = call.rel(0);
    MultiJoin multiJoin = call.rel(1);
    // if all inputs have their projFields set, then projection information
    // has already been pushed into each input
    boolean allSet = true;
    for (int i = 0; i < multiJoin.getInputs().size(); i++) {
        if (multiJoin.getProjFields().get(i) == null) {
            allSet = false;
            break;
        }
    }
    if (allSet) {
        return;
    }
    // create a new MultiJoin that reflects the columns in the projection
    // above the MultiJoin
    final RelBuilder relBuilder = call.builder();
    MultiJoin newMultiJoin = RelOptUtil.projectMultiJoin(multiJoin, project);
    relBuilder.push(newMultiJoin).project(project.getProjects(), project.getRowType().getFieldNames());
    call.transformTo(relBuilder.build());
}

14 Source : ComplexUnnestVisitor.java
with Apache License 2.0
from lealone

@Override
public RelNode visit(LogicalCorrelate correlate) {
    RelNode left = correlate.getLeft().accept(this);
    leftInputs.put(correlate.getCorrelationId(), left);
    RelNode right = correlate.getRight().accept(this);
    // if right input wasn't changed or left input wasn't changed
    // after rewriting right input, no need to create Correlate with new CorrelationId
    if (correlate.getRight() == right || left == leftInputs.get(correlate.getCorrelationId())) {
        if (correlate.getLeft() == left) {
            return correlate;
        }
        // changed only inputs, but CorrelationId left the same
        return correlate.copy(correlate.getTraitSet(), Arrays.asList(left, right));
    }
    Correlate newCorrelate = correlate.copy(correlate.getTraitSet(), leftInputs.get(correlate.getCorrelationId()), right, updatedCorrelationIds.get(correlate.getCorrelationId()), ImmutableBitSet.of(left.getRowType().getFieldCount()), correlate.getJoinType());
    RelBuilder builder = DrillRelFactories.LOGICAL_BUILDER.create(correlate.getCluster(), null);
    builder.push(newCorrelate);
    List<RexNode> topProjectExpressions = left.getRowType().getFieldList().stream().map(field -> builder.getRexBuilder().makeInputRef(newCorrelate, field.getIndex())).collect(Collectors.toList());
    // Accommodate the new $COMPLEX_FIELD_NAME column.
    int rightStartIndex = left.getRowType().getFieldList().size() + 1;
    switch(correlate.getJoinType()) {
        case LEFT:
        case INNER:
            // adds field from the right input of correlate to the top project
            topProjectExpressions.addAll(right.getRowType().getFieldList().stream().map(field -> builder.getRexBuilder().makeInputRef(newCorrelate, field.getIndex() + rightStartIndex)).collect(Collectors.toList()));
        // fall through
        case ANTI:
        case SEMI:
            builder.project(topProjectExpressions, correlate.getRowType().getFieldNames());
    }
    return builder.build();
}
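
DrillRelFactories.LOGICAL_BUILDER above is a Drill-specific RelBuilderFactory; the stock Calcite factory is used the same way. A minimal sketch, assuming cluster is an existing RelOptCluster and input an already-built RelNode (both illustrative):

// 'cluster' and 'input' are assumed to exist already (see lead-in)
RelBuilder builder = RelFactories.LOGICAL_BUILDER.create(cluster, null);
builder.push(input);
// keep only the first field of the pushed input
builder.project(builder.field(0));
RelNode result = builder.build();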

14 Source : AggregateProjectReduceRule.java
with Apache License 2.0
from Kyligence

@Override
public void onMatch(RelOptRuleCall call) {
    LogicalAggregate aggr = call.rel(0);
    LogicalProject project = call.rel(1);
    // generate input ref in group set mapping between old and new project
    List<Pair<RexNode, String>> projects = project.getNamedProjects();
    List<Pair<RexNode, String>> newProjects = new ArrayList<>();
    Map<Integer, Integer> mapping = new HashMap<>();
    for (int key : aggr.getGroupSet()) {
        mappingKeys(key, projects.get(key), newProjects, mapping);
    }
    // create new group set
    final ImmutableBitSet newGroupSet = aggr.getGroupSet().permute(mapping);
    // mapping input ref in aggr calls and generate new aggr calls
    final ImmutableList.Builder<AggregateCall> newAggrCalls = ImmutableList.builder();
    for (AggregateCall aggrCall : aggr.getAggCallList()) {
        final ImmutableList.Builder<Integer> newArgs = ImmutableList.builder();
        for (int key : aggrCall.getArgList()) {
            mappingKeys(key, projects.get(key), newProjects, mapping);
            newArgs.add(mapping.get(key));
        }
        final int newFilterArg;
        if (aggrCall.filterArg >= 0) {
            int key = aggrCall.filterArg;
            mappingKeys(key, projects.get(key), newProjects, mapping);
            newFilterArg = mapping.get(aggrCall.filterArg);
        } else {
            newFilterArg = -1;
        }
        newAggrCalls.add(aggrCall.copy(newArgs.build(), newFilterArg));
    }
    // just return if nothing changed
    if (newProjects.equals(project.getNamedProjects())) {
        return;
    }
    RelBuilder relBuilder = call.builder();
    relBuilder.push(project.getInput());
    relBuilder.project(Pair.left(newProjects), Pair.right(newProjects));
    relBuilder.aggregate(relBuilder.groupKey(newGroupSet, false, null), newAggrCalls.build());
    RelNode rel = relBuilder.build();
    call.transformTo(rel);
}
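
The closing push / project / aggregate / transformTo sequence is the usual way a planner rule hands back its rewritten plan. A minimal sketch of the same sequence, assuming it runs inside an onMatch whose call and input are available and the input has at least two fields (illustrative):

// 'call' is the RelOptRuleCall of an enclosing onMatch; 'input' is an assumed RelNode
final RelBuilder b = call.builder();
b.push(input)
    .project(b.field(0), b.field(1))
    // group by the first projected column and count the rows in each group
    .aggregate(b.groupKey(ImmutableBitSet.of(0)), b.countStar("CNT"));
call.transformTo(b.build());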

13 Source : DrdsRunner.java
with GNU General Public License v3.0
from MyCATApache

private RelNode getRelRoot(String defaultSchemaName, SchemaPlus plus, DrdsSql drdsSql) {
    SQLStatement sqlStatement = drdsSql.getSqlStatement();
    List<Object> params = drdsSql.getParams();
    MycatCalciteMySqlNodeVisitor mycatCalciteMySqlNodeVisitor = new MycatCalciteMySqlNodeVisitor();
    sqlStatement.accept(mycatCalciteMySqlNodeVisitor);
    SqlNode sqlNode = mycatCalciteMySqlNodeVisitor.getSqlNode();
    CalciteCatalogReader catalogReader = new CalciteCatalogReader(CalciteSchema.from(plus), defaultSchemaName != null ? ImmutableList.of(defaultSchemaName) : ImmutableList.of(), MycatCalciteSupport.TypeFactory, MycatCalciteSupport.INSTANCE.getCalciteConnectionConfig());
    SqlValidator validator = new SqlValidatorImpl(SqlOperatorTables.chain(catalogReader, MycatCalciteSupport.config.getOperatorTable()), catalogReader, MycatCalciteSupport.TypeFactory, MycatCalciteSupport.INSTANCE.getValidatorConfig()) {

        @Override
        protected void inferUnknownTypes(@Nonnull RelDataType inferredType, @Nonnull SqlValidatorScope scope, @Nonnull SqlNode node) {
            if (node != null && node instanceof SqlDynamicParam) {
                RelDataType relDataType = deriveType(scope, node);
                return;
            }
            super.inferUnknownTypes(inferredType, scope, node);
        }

        @Override
        public RelDataType getUnknownType() {
            return super.getUnknownType();
        }

        @Override
        public RelDataType deriveType(SqlValidatorScope scope, SqlNode expr) {
            RelDataType res = resolveDynamicParam(expr);
            if (res == null) {
                return super.deriveType(scope, expr);
            } else {
                return res;
            }
        }

        @Override
        public void validateLiteral(SqlLiteral literal) {
            if (literal.getTypeName() == SqlTypeName.DECIMAL) {
                return;
            }
            super.validateLiteral(literal);
        }

        private RelDataType resolveDynamicParam(SqlNode expr) {
            if (expr != null && expr instanceof SqlDynamicParam) {
                int index = ((SqlDynamicParam) expr).getIndex();
                if (index < params.size()) {
                    Object o = params.get(index);
                    if (o == null) {
                        return super.typeFactory.createUnknownType();
                    } else {
                        SqlTypeName type = null;
                        if (o instanceof String) {
                            type = SqlTypeName.VARCHAR;
                        } else if (o instanceof Number) {
                            type = SqlTypeName.DECIMAL;
                        } else {
                            Clreplaced<?> aClreplaced = o.getClreplaced();
                            for (SqlType value : SqlType.values()) {
                                if (value.clazz == aClreplaced) {
                                    type = SqlTypeName.getNameForJdbcType(value.id);
                                }
                            }
                        }
                        Objects.requireNonNull(type, () -> "unknown type:" + o.getClass());
                        return super.typeFactory.createSqlType(type);
                    }
                }
            }
            return null;
        }

        @Override
        public RelDataType getValidatedNodeType(SqlNode node) {
            RelDataType relDataType = resolveDynamicParam(node);
            if (relDataType == null) {
                return super.getValidatedNodeType(node);
            } else {
                return relDataType;
            }
        }

        @Override
        public CalciteException handleUnresolvedFunction(SqlCall call, SqlFunction unresolvedFunction, List<RelDataType> argTypes, List<String> argNames) {
            return super.handleUnresolvedFunction(call, unresolvedFunction, argTypes, argNames);
        }

        @Override
        protected void addToSelectList(List<SqlNode> list, Set<String> aliases, List<Map.Entry<String, RelDataType>> fieldList, SqlNode exp, SelectScope scope, boolean includeSystemVars) {
            super.addToSelectList(list, aliases, fieldList, exp, scope, includeSystemVars);
        }

        @Override
        protected void validateWhereOrOn(SqlValidatorScope scope, SqlNode condition, String clause) {
            if (!condition.getKind().belongsTo(SqlKind.COMPARISON)) {
                condition = SqlStdOperatorTable.CAST.createCall(SqlParserPos.ZERO, condition, SqlTypeUtil.convertTypeToSpec(typeFactory.createSqlType(SqlTypeName.BOOLEAN)));
            }
            super.validateWhereOrOn(scope, condition, clause);
        }
    };
    SqlNode validated;
    validated = validator.validate(sqlNode);
    RelOptCluster cluster = newCluster();
    RelBuilder relBuilder = MycatCalciteSupport.relBuilderFactory.create(cluster, catalogReader);
    SqlToRelConverter sqlToRelConverter = new SqlToRelConverter(NOOP_EXPANDER, validator, catalogReader, cluster, MycatCalciteSupport.config.getConvertletTable(), MycatCalciteSupport.sqlToRelConverterConfig);
    RelRoot root = sqlToRelConverter.convertQuery(validated, false, true);
    drdsSql.setAliasList(root.fields.stream().map(Pair::getValue).collect(Collectors.toList()));
    final RelRoot root2 = root.withRel(sqlToRelConverter.flattenTypes(root.rel, true));
    return root2.withRel(RelDecorrelator.decorrelateQuery(root.rel, relBuilder)).project();
}
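
The tail of getRelRoot, in isolation: the RelBuilder created from the same cluster is what RelDecorrelator uses to rebuild the plan, and the final project() applies the root's output field list. A minimal sketch, reusing the root, sqlToRelConverter and relBuilder created above (decorrelating root2.rel keeps the flattening and decorrelation steps consistent):

RelRoot root2 = root.withRel(sqlToRelConverter.flattenTypes(root.rel, true));
// rewrite correlated sub-queries into joins before handing the plan on
RelNode decorrelated = RelDecorrelator.decorrelateQuery(root2.rel, relBuilder);
RelNode plan = root2.withRel(decorrelated).project();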

13 Source : SubQueryRemoveRule.java
with Apache License 2.0
from lealone

/**
 * Rewrites a SOME sub-query into a {@link Join}.
 *
 * @param e            SOME sub-query to rewrite
 * @param builder      Builder
 *
 * @return Expression that may be used to replace the RexSubQuery
 */
private RexNode rewriteSome(RexSubQuery e, RelBuilder builder) {
    // Most general case, where the left and right keys might have nulls, and
    // caller requires 3-valued logic return.
    // 
    // select e.deptno, e.deptno < some (select deptno from emp) as v
    // from emp as e
    // 
    // becomes
    // 
    // select e.deptno,
    // case
    // when q.c = 0 then false // sub-query is empty
    // when (e.deptno < q.m) is true then true
    // when q.c > q.d then unknown // sub-query has at least one null
    // else e.deptno < q.m
    // end as v
    // from emp as e
    // cross join (
    // select max(deptno) as m, count(*) as c, count(deptno) as d
    // from emp) as q
    // 
    final SqlQuantifyOperator op = (SqlQuantifyOperator) e.getOperator();
    builder.push(e.getRel()).aggregate(builder.groupKey(), op.comparisonKind == SqlKind.GREATER_THAN || op.comparisonKind == SqlKind.GREATER_THAN_OR_EQUAL ? builder.min("m", builder.field(0)) : builder.max("m", builder.field(0)), builder.count(false, "c"), builder.count(false, "d", builder.field(0))).as("q").join(JoinRelType.INNER);
    return builder.call(SqlStdOperatorTable.CASE, builder.call(SqlStdOperatorTable.EQUALS, builder.field("q", "c"), builder.literal(0)), builder.literal(false), builder.call(SqlStdOperatorTable.IS_TRUE, builder.call(RelOptUtil.op(op.comparisonKind, null), e.getOperands().get(0), builder.field("q", "m"))), builder.literal(true), builder.call(SqlStdOperatorTable.GREATER_THAN, builder.field("q", "c"), builder.field("q", "d")), builder.literal(null), builder.call(RelOptUtil.op(op.comparisonKind, null), e.getOperands().get(0), builder.field("q", "m")));
}
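
Unrolled, the single-row aggregate that the rewrite attaches to the sub-query reads as follows. A minimal sketch, assuming b is a RelBuilder whose stack already holds the outer relation with the sub-query relation pushed on top, and a '<' or '<=' comparison so that max is the right extreme:

b.aggregate(b.groupKey(),
        // m: the extreme value the outer expression is compared against
        b.max("m", b.field(0)),
        // c: total number of rows in the sub-query
        b.count(false, "c"),
        // d: number of non-null keys; c > d means the sub-query contains nulls
        b.count(false, "d", b.field(0)))
    .as("q")
    // cross join the one-row aggregate onto the outer relation
    .join(JoinRelType.INNER);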
