@@ -176,6 +176,9 @@
* <p>### Query Generation
* <p>Options for query generation.
* <p>{@docTable:queryGeneration}
* <p>### Query Processing
* <p>Options for query processing.
* <p>{@docTable:queryProcessing}
* <p>### Source Path Defaults
* <p>Defaults for the path expressions in `sourcePath`, also see [Source Path
* Syntax](#path-syntax).
@@ -266,6 +269,9 @@
* <p>### Query-Generierung
* <p>Optionen für die Query-Generierung in `queryGeneration`.
* <p>{@docTable:queryGeneration}
* <p>### Query-Verarbeitung
* <p>Optionen für die Query-Verarbeitung in `queryProcessing`.
* <p>{@docTable:queryProcessing}
* <p>### SQL-Pfad-Defaults
* <p>Defaults für die Pfad-Ausdrücke in `sourcePath`, siehe auch
* [SQL-Pfad-Syntax](#path-syntax).
@@ -363,6 +369,8 @@
* @ref:sourcePathDefaults {@link de.ii.xtraplatform.features.sql.domain.ImmutableSqlPathDefaults}
* @ref:queryGeneration {@link
* de.ii.xtraplatform.features.sql.domain.ImmutableQueryGeneratorSettings}
* @ref:queryProcessing {@link
* de.ii.xtraplatform.features.sql.domain.ImmutableQueryProcessorSettings}
* @ref:datasetChanges2 {@link
* de.ii.xtraplatform.features.sql.domain.FeatureProviderSqlData.DatasetChangeSettings}
*/
@@ -403,6 +411,13 @@
@DocStep(type = Step.JSON_PROPERTIES)
},
columnSet = ColumnSet.JSON_PROPERTIES),
@DocTable(
name = "queryProcessing",
rows = {
@DocStep(type = Step.TAG_REFS, params = "{@ref:queryProcessing}"),
@DocStep(type = Step.JSON_PROPERTIES)
},
columnSet = ColumnSet.JSON_PROPERTIES),
},
vars = {
@DocVar(
@@ -1467,6 +1482,14 @@ public boolean supportsIsNull() {
return true;
}

@Override
public boolean skipUnusedPipelineSteps() {
if (Objects.nonNull(getData().getQueryProcessing())) {
return getData().getQueryProcessing().getSkipUnusedPipelineSteps();
}
return false;
}

@Override
public FeatureSchema getQueryablesSchema(
FeatureSchema schema,
@@ -75,6 +75,16 @@ public interface FeatureProviderSqlData
@Nullable
QueryGeneratorSettings getQueryGeneration();

/**
* @langEn Options for query processing, for details see [Query
* Processing](10-sql.md#query-processing) below.
* @langDe Einstellungen für die Query-Verarbeitung, für Details siehe
* [Query-Verarbeitung](10-sql.md#query-processing).
*/
@DocMarker("specific")
@Nullable
QueryProcessorSettings getQueryProcessing();

// for json ordering
@Override
BuildableMap<FeatureSchema, ImmutableFeatureSchema.Builder> getTypes();
@@ -211,6 +221,33 @@ default boolean getGeometryAsWkb() {
}
}

@Value.Immutable
@JsonDeserialize(builder = ImmutableQueryProcessorSettings.Builder.class)
interface QueryProcessorSettings {

/**
* @langEn Skip unused pipeline steps in the feature stream processing. If set to `true`, steps
* that are not required to fulfil the request (e.g. coordinate processing, if no coordinate
* transformation or specific coordinate precision is needed) are skipped. This can improve
* performance depending on the query and the capabilities used in the feature provider. For
* now the default is `false`, but it may change to `true` if experience shows that the
* option has no side effects.
* @langDe Nicht verwendete Pipeline-Schritte in der Feature-Stream-Verarbeitung überspringen.
* Wenn diese Option auf `true` gesetzt ist, werden Schritte übersprungen, die zur Erfüllung
* der Query nicht erforderlich sind (z. B. Koordinatenverarbeitung, wenn keine
* Koordinatentransformation oder bestimmte Koordinatengenauigkeit erforderlich ist). Dies
* kann die Leistung je nach Query und den im Feature-Provider verwendeten Funktionen
* verbessern. Derzeit ist der Default `false`, dies kann sich aber zu `true` ändern, wenn
* die Erfahrung zeigt, dass die Option keine Nebenwirkungen hat.
* @default false
*/
@Value.Default
default boolean getSkipUnusedPipelineSteps() {
return false;
}
}

@Value.Check
default FeatureProviderSqlData migrateAssumeExternalChanges() {
if (Objects.isNull(getDatasetChanges())
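For illustration, a minimal sketch of how the new `queryProcessing` settings could be built and read in code. Only `QueryProcessorSettings`, `ImmutableQueryProcessorSettings` and `getSkipUnusedPipelineSteps()` come from this change; the builder method name assumes the project-wide Immutables style that strips the `get` prefix (as the JSON property names in the generated docs suggest), and the helper class is purely hypothetical.

```java
import de.ii.xtraplatform.features.sql.domain.FeatureProviderSqlData.QueryProcessorSettings;
import de.ii.xtraplatform.features.sql.domain.ImmutableQueryProcessorSettings;
import java.util.Optional;

class QueryProcessingExample {

  // corresponds to a provider configuration that enables the option, e.g.
  //   queryProcessing:
  //     skipUnusedPipelineSteps: true
  static QueryProcessorSettings enabled() {
    return ImmutableQueryProcessorSettings.builder()
        .skipUnusedPipelineSteps(true) // builder method name is an assumption, see above
        .build();
  }

  // null-safe read, mirroring the skipUnusedPipelineSteps() override in FeatureProviderSql
  static boolean skipUnusedPipelineSteps(QueryProcessorSettings settings) {
    return Optional.ofNullable(settings)
        .map(QueryProcessorSettings::getSkipUnusedPipelineSteps)
        .orElse(false); // the option is off unless explicitly enabled
  }
}
```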
@@ -0,0 +1,171 @@
/*
* Copyright 2025 interactive instruments GmbH
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package de.ii.xtraplatform.features.domain;

import com.google.common.collect.ImmutableSet;
import de.ii.xtraplatform.crs.domain.EpsgCrs;
import de.ii.xtraplatform.crs.domain.OgcCrs;
import de.ii.xtraplatform.features.domain.FeatureStream.PipelineSteps;
import de.ii.xtraplatform.features.domain.SchemaBase.Type;
import de.ii.xtraplatform.features.domain.transform.PropertyTransformation;
import de.ii.xtraplatform.features.domain.transform.PropertyTransformations;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;

public class DeterminePipelineStepsThatCannotBeSkipped
implements SchemaVisitorTopDown<FeatureSchema, Set<PipelineSteps>> {

private final EpsgCrs nativeCrs;
private final EpsgCrs targetCrs;
private final TypeQuery query;
private final Optional<PropertyTransformations> propertyTransformations;
private final boolean simplifyGeometries;
private final boolean deriveMetadataFromContent;
private final boolean requiresPropertiesInSequence;
private final boolean supportSecondaryGeometry;
private final boolean distinguishNullAndMissing;
private final String featureType;

public DeterminePipelineStepsThatCannotBeSkipped(
TypeQuery query,
String featureType,
Optional<PropertyTransformations> propertyTransformations,
EpsgCrs nativeCrs,
EpsgCrs targetCrs,
boolean deriveMetadataFromContent,
boolean requiresPropertiesInSequence,
boolean supportSecondaryGeometry,
boolean distinguishNullAndMissing,
boolean simplifyGeometries) {
this.query = query;
this.propertyTransformations = propertyTransformations;
this.nativeCrs = nativeCrs;
this.targetCrs = targetCrs;
this.deriveMetadataFromContent = deriveMetadataFromContent;
this.requiresPropertiesInSequence = requiresPropertiesInSequence;
this.supportSecondaryGeometry = supportSecondaryGeometry;
this.distinguishNullAndMissing = distinguishNullAndMissing;
this.featureType = featureType;
this.simplifyGeometries = simplifyGeometries;
}

@Override
public Set<PipelineSteps> visit(
FeatureSchema schema,
List<FeatureSchema> parents,
List<Set<PipelineSteps>> visitedProperties) {
ImmutableSet.Builder<PipelineSteps> steps = ImmutableSet.builder();

if (parents.isEmpty()) {
// at the root level: aggregate information from properties and test global settings

// coordinate processing is needed if the target CRS differs from the native CRS, if geometries
// are simplified, or if a secondary geometry is supported and the native CRS is not CRS84/CRS84h
if (!targetCrs.equals(nativeCrs)
|| (simplifyGeometries)
|| (!(OgcCrs.CRS84.equals(nativeCrs) || OgcCrs.CRS84h.equals(nativeCrs))
&& supportSecondaryGeometry
&& schema.isSecondaryGeometry())) {
steps.add(PipelineSteps.COORDINATES);
}

// metadata processing (extents, etag) is needed only if the response is not sent as a stream
if (deriveMetadataFromContent) {
steps.add(PipelineSteps.METADATA, PipelineSteps.ETAG);
}

// aggregate information from visited properties
visitedProperties.forEach(steps::addAll);

// post-process special cases
Set<PipelineSteps> intermediateResult = steps.build();

// include transformations from the feature provider as in the feature stream
PropertyTransformations mergedTransformations =
FeatureStreamImpl.getPropertyTransformations(
Map.of(featureType, schema), query, propertyTransformations);

// cleaning is not needed if null values are kept (wildcard transformation) or if null and
// missing values are not distinguished anyway
if (intermediateResult.contains(PipelineSteps.CLEAN)
&& (mergedTransformations.hasTransformation(
PropertyTransformations.WILDCARD, pt -> !pt.getRemoveNullValues().orElse(true))
|| !distinguishNullAndMissing)) {
steps = ImmutableSet.builder();
intermediateResult.stream().filter(s -> s != PipelineSteps.CLEAN).forEach(steps::add);
}

// mapping is also needed if specific property transformations are applied (the ones with a
// wildcard are handled differently: nulls are removed in the CLEAN step and flattening is
// already handled by including MAPPING_SCHEMA for any objects or arrays);
// if only value transformations are applied and no other mapping is needed, just execute
// the value transformations, but skip schema transformations and token slice transformers
if (!intermediateResult.contains(PipelineSteps.MAPPING_SCHEMA)) {
if (requiresPropertiesInSequence) {
steps.add(PipelineSteps.MAPPING_SCHEMA);
steps.add(PipelineSteps.MAPPING_VALUES);
} else {
List<PropertyTransformation> transformations =
mergedTransformations.getTransformations().entrySet().stream()
.filter(entry -> !PropertyTransformations.WILDCARD.equals(entry.getKey()))
.map(Entry::getValue)
.flatMap(Collection::stream)
.toList();
if (!transformations.isEmpty()) {
if (transformations.stream()
.allMatch(PropertyTransformation::onlyValueTransformations)) {
steps.add(PipelineSteps.MAPPING_VALUES);
} else {
steps.add(PipelineSteps.MAPPING_SCHEMA);
steps.add(PipelineSteps.MAPPING_VALUES);
}
}
}
} else {
steps.add(PipelineSteps.MAPPING_VALUES);
}

} else {
// at property level: determine needed steps based on schema information

// mapping is needed for any complex schema: concat/coalesce/merge, an array/object, or use of
// a sub-decoder
if (!schema.getConcat().isEmpty()
|| !schema.getCoalesce().isEmpty()
|| !schema.getMerge().isEmpty()
|| schema.isArray()
|| schema.isObject()
|| schema
.getSourcePath()
.filter(sourcePath -> sourcePath.matches(".+?\\[[^=\\]]+].+"))
.isPresent()) {
steps.add(PipelineSteps.MAPPING_SCHEMA);
}

// geometry processing is needed for geometries with constraints that require special handling
// to upgrade the geometry type
if (schema.getType() == Type.GEOMETRY
&& schema
.getConstraints()
.filter(constraints -> constraints.isClosed() || constraints.isComposite())
.isPresent()) {
steps.add(PipelineSteps.GEOMETRY);
}

// unless the property is required, cleaning may be needed to remove null values
if (schema.getConstraints().filter(SchemaConstraints::isRequired).isEmpty()) {
steps.add(PipelineSteps.CLEAN);
}
}

return steps.build();
}
}
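For context, a rough usage sketch of the visitor; the `accept(...)` entry point on `FeatureSchema` and all argument values are assumptions taken from the surrounding request context, not part of this change.

```java
// sketch only: determine which steps must stay in the pipeline for this request
Set<PipelineSteps> requiredSteps =
    schema.accept(
        new DeterminePipelineStepsThatCannotBeSkipped(
            query,
            featureType,
            propertyTransformations,
            nativeCrs,
            targetCrs,
            /* deriveMetadataFromContent */ false,
            /* requiresPropertiesInSequence */ false,
            /* supportSecondaryGeometry */ true,
            /* distinguishNullAndMissing */ false,
            /* simplifyGeometries */ false));

// a step may only be dropped from the pipeline if it is not in requiredSteps
boolean coordinatesNeeded = requiredSteps.contains(PipelineSteps.COORDINATES);
```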
@@ -32,6 +32,10 @@ default boolean supportsIsNull() {
return false;
}

default boolean skipUnusedPipelineSteps() {
return false;
}

default FeatureStream getFeatureStream(FeatureQuery query) {
throw new UnsupportedOperationException();
}
@@ -30,7 +30,8 @@
public interface FeatureStream {

enum PipelineSteps {
MAPPING,
MAPPING_SCHEMA,
MAPPING_VALUES,
GEOMETRY,
COORDINATES,
CLEAN,
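Taken together, a feature stream implementation could gate the schema analysis on the new provider capability roughly as sketched below; the `EnumSet` fallback and the local variables are illustrative, not prescribed by this change.

```java
// sketch: only analyse the schema when the provider opts in, otherwise keep every step
Set<PipelineSteps> stepsThatMustRun =
    featureQueries.skipUnusedPipelineSteps()
        ? schema.accept(determinePipelineSteps) // visitor shown above
        : EnumSet.allOf(PipelineSteps.class);   // conservative default: run everything

boolean runCoordinateProcessing = stepsThatMustRun.contains(PipelineSteps.COORDINATES);
```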