sis-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ama...@apache.org
Subject [sis] 06/22: feat(SQLStore): First draft for conversion of OpenGIS Filter to SQL
Date Thu, 14 Nov 2019 11:46:40 GMT
This is an automated email from the ASF dual-hosted git repository.

amanin pushed a commit to branch refactor/sql-store
in repository https://gitbox.apache.org/repos/asf/sis.git

commit e7a975ac8d577966a37cabe727c4f3f66b50ac2d
Author: Alexis Manin <amanin@apache.org>
AuthorDate: Fri Sep 27 11:58:36 2019 +0200

    feat(SQLStore): First draft for conversion of OpenGIS Filter to SQL
    
    FeatureSet API allows defining a subset of any dataset programmatically.
    In case of SQL datasets, such a subset can be computed efficiently by the database engine.
---
 .../apache/sis/filter/DefaultFilterFactory.java    |  53 ++-
 .../java/org/apache/sis/test/sql/TestDatabase.java |  49 ++-
 pom.xml                                            |   2 +
 .../sis/internal/sql/feature/ANSIInterpreter.java  | 423 +++++++++++++++++++++
 .../apache/sis/internal/sql/feature/Analyzer.java  |   2 +-
 .../sis/internal/sql/feature/FeatureAdapter.java   |   7 +-
 .../apache/sis/internal/sql/feature/Features.java  |  50 ++-
 .../sis/internal/sql/feature/QueryBuilder.java     |  18 -
 .../sis/internal/sql/feature/QueryFeatureSet.java  |  69 ++--
 .../sis/internal/sql/feature/SQLQueryAdapter.java  |  53 ++-
 .../sis/internal/sql/feature/SQLQueryBuilder.java  |  28 --
 .../apache/sis/internal/sql/feature/StreamSQL.java |  16 +
 .../org/apache/sis/internal/sql/feature/Table.java |  25 +-
 .../sis/internal/sql/feature/TableSubset.java      |  67 ++++
 .../org/apache/sis/storage/sql/SQLStoreTest.java   | 104 ++++-
 .../apache/sis/internal/storage/SubsetAdapter.java |  35 +-
 .../sis/internal/storage/query/SimpleQuery.java    |  12 +-
 17 files changed, 837 insertions(+), 176 deletions(-)

diff --git a/core/sis-feature/src/main/java/org/apache/sis/filter/DefaultFilterFactory.java b/core/sis-feature/src/main/java/org/apache/sis/filter/DefaultFilterFactory.java
index e74cef6..b1b1197 100644
--- a/core/sis-feature/src/main/java/org/apache/sis/filter/DefaultFilterFactory.java
+++ b/core/sis-feature/src/main/java/org/apache/sis/filter/DefaultFilterFactory.java
@@ -23,19 +23,40 @@ import java.util.List;
 import java.util.Map;
 import java.util.ServiceLoader;
 import java.util.Set;
-import org.apache.sis.internal.feature.FunctionRegister;
+
 import org.opengis.filter.*;
 import org.opengis.filter.capability.*;
-import org.opengis.filter.capability.SpatialOperator;
-import org.opengis.filter.expression.*;
-import org.opengis.filter.identity.*;
-import org.opengis.filter.sort.*;
-import org.opengis.filter.spatial.*;
+import org.opengis.filter.expression.Add;
+import org.opengis.filter.expression.Divide;
+import org.opengis.filter.expression.Expression;
+import org.opengis.filter.expression.Function;
+import org.opengis.filter.expression.Literal;
+import org.opengis.filter.expression.Multiply;
+import org.opengis.filter.expression.PropertyName;
+import org.opengis.filter.expression.Subtract;
+import org.opengis.filter.identity.FeatureId;
+import org.opengis.filter.identity.GmlObjectId;
+import org.opengis.filter.identity.Identifier;
+import org.opengis.filter.sort.SortBy;
+import org.opengis.filter.sort.SortOrder;
+import org.opengis.filter.spatial.BBOX;
+import org.opengis.filter.spatial.Beyond;
+import org.opengis.filter.spatial.Contains;
+import org.opengis.filter.spatial.Crosses;
+import org.opengis.filter.spatial.DWithin;
+import org.opengis.filter.spatial.Disjoint;
+import org.opengis.filter.spatial.Equals;
+import org.opengis.filter.spatial.Intersects;
+import org.opengis.filter.spatial.Overlaps;
+import org.opengis.filter.spatial.Touches;
+import org.opengis.filter.spatial.Within;
 import org.opengis.filter.temporal.*;
 import org.opengis.geometry.Envelope;
 import org.opengis.geometry.Geometry;
 import org.opengis.util.GenericName;
 
+import org.apache.sis.internal.feature.FunctionRegister;
+
 
 /**
  * Default implementation of GeoAPI filter factory for creation of {@link Filter} and {@link Expression} instances.
@@ -65,6 +86,12 @@ public class DefaultFilterFactory implements FilterFactory2 {
         }
     }
 
+    /**
+     * According to OGC Filter encoding v2.0, comparison operators should default to case-sensitive comparison. We'll
+     * use this constant to model it, so it will be easier to change the default value if the standard evolves.
+     * Doc reference: OGC 09-026r1 and ISO 19143:2010(E), section 7.7.3.2
+     */
+    private static final boolean DEFAULT_MATCH_CASE = true;
 
     /**
      * Creates a new factory.
@@ -409,7 +436,7 @@ public class DefaultFilterFactory implements FilterFactory2 {
      */
     @Override
     public PropertyIsEqualTo equals(final Expression expression1, final Expression expression2) {
-        return equal(expression1, expression2, true, MatchAction.ANY);
+        return equal(expression1, expression2, DEFAULT_MATCH_CASE, MatchAction.ANY);
     }
 
     /**
@@ -427,7 +454,7 @@ public class DefaultFilterFactory implements FilterFactory2 {
      */
     @Override
     public PropertyIsNotEqualTo notEqual(final Expression expression1, final Expression expression2) {
-        return notEqual(expression1, expression2, true, MatchAction.ANY);
+        return notEqual(expression1, expression2, DEFAULT_MATCH_CASE, MatchAction.ANY);
     }
 
     /**
@@ -445,7 +472,7 @@ public class DefaultFilterFactory implements FilterFactory2 {
      */
     @Override
     public PropertyIsGreaterThan greater(final Expression expression1, final Expression expression2) {
-        return greater(expression1,expression2,false, MatchAction.ANY);
+        return greater(expression1,expression2,DEFAULT_MATCH_CASE, MatchAction.ANY);
     }
 
     /**
@@ -463,7 +490,7 @@ public class DefaultFilterFactory implements FilterFactory2 {
      */
     @Override
     public PropertyIsGreaterThanOrEqualTo greaterOrEqual(final Expression expression1, final Expression expression2) {
-        return greaterOrEqual(expression1, expression2,false, MatchAction.ANY);
+        return greaterOrEqual(expression1, expression2,DEFAULT_MATCH_CASE, MatchAction.ANY);
     }
 
     /**
@@ -481,7 +508,7 @@ public class DefaultFilterFactory implements FilterFactory2 {
      */
     @Override
     public PropertyIsLessThan less(final Expression expression1, final Expression expression2) {
-        return less(expression1, expression2, false, MatchAction.ANY);
+        return less(expression1, expression2, DEFAULT_MATCH_CASE, MatchAction.ANY);
     }
 
     /**
@@ -499,7 +526,7 @@ public class DefaultFilterFactory implements FilterFactory2 {
      */
     @Override
     public PropertyIsLessThanOrEqualTo lessOrEqual(final Expression expression1, final Expression expression2) {
-        return lessOrEqual(expression1, expression2, false, MatchAction.ANY);
+        return lessOrEqual(expression1, expression2, DEFAULT_MATCH_CASE, MatchAction.ANY);
     }
 
     /**
@@ -527,7 +554,7 @@ public class DefaultFilterFactory implements FilterFactory2 {
     public PropertyIsLike like(final Expression expression, final String pattern,
             final String wildcard, final String singleChar, final String escape)
     {
-        return like(expression,pattern,wildcard,singleChar,escape,false);
+        return like(expression,pattern,wildcard,singleChar,escape,DEFAULT_MATCH_CASE);
     }
 
     /**
diff --git a/core/sis-metadata/src/test/java/org/apache/sis/test/sql/TestDatabase.java b/core/sis-metadata/src/test/java/org/apache/sis/test/sql/TestDatabase.java
index a3d0255..9bb02a1 100644
--- a/core/sis-metadata/src/test/java/org/apache/sis/test/sql/TestDatabase.java
+++ b/core/sis-metadata/src/test/java/org/apache/sis/test/sql/TestDatabase.java
@@ -17,23 +17,27 @@
 package org.apache.sis.test.sql;
 
 import java.io.IOException;
-import javax.sql.DataSource;
 import java.sql.Connection;
-import java.sql.Statement;
 import java.sql.ResultSet;
-import java.sql.SQLException;
 import java.sql.SQLDataException;
-import org.postgresql.PGProperty;
-import org.postgresql.ds.PGSimpleDataSource;
-import org.hsqldb.jdbc.JDBCDataSource;
-import org.hsqldb.jdbc.JDBCPool;
-import org.apache.derby.jdbc.EmbeddedDataSource;
+import java.sql.SQLException;
+import java.sql.Statement;
+import javax.sql.DataSource;
+
 import org.apache.sis.internal.metadata.sql.Initializer;
 import org.apache.sis.internal.metadata.sql.ScriptRunner;
 import org.apache.sis.test.TestCase;
 import org.apache.sis.util.Debug;
 
-import static org.junit.Assume.*;
+import org.apache.derby.jdbc.EmbeddedDataSource;
+
+import org.hsqldb.jdbc.JDBCDataSource;
+import org.hsqldb.jdbc.JDBCPool;
+import org.postgresql.PGProperty;
+import org.postgresql.ds.PGSimpleDataSource;
+
+import static org.junit.Assume.assumeFalse;
+import static org.junit.Assume.assumeTrue;
 
 
 /**
@@ -203,29 +207,32 @@ public strictfp class TestDatabase implements AutoCloseable {
          * Current version does not use pooling on the assumption
          * that connections to local host are fast enough.
          */
-        try (Connection c = ds.getConnection()) {
-            try (ResultSet reflect = c.getMetaData().getSchemas(null, schema)) {
-                if (reflect.next()) {
-                    throw new SQLDataException("Schema \"" + schema + "\" already exists in \"" + NAME + "\".");
+        if (create) {
+            try (Connection c = ds.getConnection()) {
+                try (ResultSet reflect = c.getMetaData().getSchemas(null, schema)) {
+                    if (reflect.next()) {
+                        throw new SQLDataException("Schema \"" + schema + "\" already exists in \"" + NAME + "\".");
+                    }
                 }
-            }
-            if (create) {
                 try (Statement s = c.createStatement()) {
                     s.execute("CREATE SCHEMA \"" + schema + '"');
                 }
+
+            } catch (SQLException e) {
+                final String state = e.getSQLState();
+                assumeFalse("This test needs a PostgreSQL server running on the local host.", "08001".equals(state));
+                assumeFalse("This test needs a PostgreSQL database named \"" + NAME + "\".", "3D000".equals(state));
+                throw e;
             }
-        } catch (SQLException e) {
-            final String state = e.getSQLState();
-            assumeFalse("This test needs a PostgreSQL server running on the local host.", "08001".equals(state));
-            assumeFalse("This test needs a PostgreSQL database named \"" + NAME + "\".",  "3D000".equals(state));
-            throw e;
         }
         return new TestDatabase(ds) {
             @Override public void close() throws SQLException {
                 final PGSimpleDataSource ds = (PGSimpleDataSource) source;
                 try (Connection c = ds.getConnection()) {
                     try (Statement s = c.createStatement()) {
-                        s.execute("DROP SCHEMA \"" + ds.getCurrentSchema() + "\" CASCADE");
+                        // Prevents tests from hanging indefinitely if connections are not properly released in test cases.
+                        s.setQueryTimeout(10);
+                        s.execute("DROP SCHEMA \"" + ds.getCurrentSchema() + "\" CASCADE;");
                     }
                 }
             }
diff --git a/pom.xml b/pom.xml
index 8ea8cd8..178c211 100644
--- a/pom.xml
+++ b/pom.xml
@@ -745,6 +745,8 @@
               <skip>${skipTests}</skip> <!-- When skipping tests, skip also checkstyle verification. -->
               <encoding>${project.build.sourceEncoding}</encoding>
               <consoleOutput>true</consoleOutput>
+              <!-- This flag asks to ignore generated source packages, like JMH compilation artifacts -->
+              <excludes>**/generated/**/*</excludes>
               <checkstyleRules>
                 <module name="Checker">
                   <!--
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/ANSIInterpreter.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/ANSIInterpreter.java
new file mode 100644
index 0000000..48c7d63
--- /dev/null
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/ANSIInterpreter.java
@@ -0,0 +1,423 @@
+package org.apache.sis.internal.sql.feature;
+
+import java.util.List;
+import java.util.function.BooleanSupplier;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
+import org.opengis.filter.*;
+import org.opengis.filter.expression.Add;
+import org.opengis.filter.expression.BinaryExpression;
+import org.opengis.filter.expression.Divide;
+import org.opengis.filter.expression.Expression;
+import org.opengis.filter.expression.ExpressionVisitor;
+import org.opengis.filter.expression.Function;
+import org.opengis.filter.expression.Literal;
+import org.opengis.filter.expression.Multiply;
+import org.opengis.filter.expression.NilExpression;
+import org.opengis.filter.expression.PropertyName;
+import org.opengis.filter.expression.Subtract;
+import org.opengis.filter.spatial.BBOX;
+import org.opengis.filter.spatial.Beyond;
+import org.opengis.filter.spatial.Contains;
+import org.opengis.filter.spatial.Crosses;
+import org.opengis.filter.spatial.DWithin;
+import org.opengis.filter.spatial.Disjoint;
+import org.opengis.filter.spatial.Equals;
+import org.opengis.filter.spatial.Intersects;
+import org.opengis.filter.spatial.Overlaps;
+import org.opengis.filter.spatial.Touches;
+import org.opengis.filter.spatial.Within;
+import org.opengis.filter.temporal.*;
+import org.opengis.util.GenericName;
+import org.opengis.util.LocalName;
+
+import org.apache.sis.util.iso.Names;
+
+import static org.apache.sis.util.ArgumentChecks.ensureNonNull;
+
+/**
+ * Port of Geotk FilterToSQL for an ANSI compliant query builder.
+ *
+ * @implNote For now, we over-use parentheses to ensure consistent operator priority. In the future, we could evolve
+ * this component to provide more elegant transcription of filter groups.
+ *
+ * No case-insensitive support of binary comparison is done.
+ *
+ * TODO: define a set of accepted property names, so any {@link PropertyName} filter referring to a non-pure SQL
+ * property (like relations) will cause a failure.
+ *
+ * @author Alexis Manin (Geomatys)
+ */
+public class ANSIInterpreter implements FilterVisitor, ExpressionVisitor {
+
+    private final java.util.function.Function<Literal, CharSequence> valueFormatter;
+
+    private final java.util.function.Function<PropertyName, CharSequence> nameFormatter;
+
+    public ANSIInterpreter() {
+        this(ANSIInterpreter::format, ANSIInterpreter::format);
+    }
+
+    public ANSIInterpreter(
+            java.util.function.Function<Literal, CharSequence> valueFormatter,
+            java.util.function.Function<PropertyName, CharSequence> nameFormatter
+    ) {
+        ensureNonNull("Literal value formmatter", valueFormatter);
+        ensureNonNull("Property name formmatter", nameFormatter);
+        this.valueFormatter = valueFormatter;
+        this.nameFormatter = nameFormatter;
+    }
+
+    @Override
+    public CharSequence visitNullFilter(Object extraData) {
+        throw new UnsupportedOperationException("Null filter is not supported.");
+    }
+
+    @Override
+    public Object visit(ExcludeFilter filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(IncludeFilter filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public CharSequence visit(And filter, Object extraData) {
+        return join(filter, " AND ", extraData);
+    }
+
+    @Override
+    public Object visit(Id filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Not filter, Object extraData) {
+        final CharSequence innerFilter = evaluateMandatory(filter.getFilter(), extraData);
+        return "NOT (" + innerFilter + ")";
+    }
+
+    @Override
+    public Object visit(Or filter, Object extraData) {
+        return join(filter, " OR ", extraData);
+    }
+
+    @Override
+    public Object visit(PropertyIsBetween filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(PropertyIsEqualTo filter, Object extraData) {
+        return joinMatchCase(filter, " = ", extraData);
+    }
+
+    @Override
+    public Object visit(PropertyIsNotEqualTo filter, Object extraData) {
+        return joinMatchCase(filter, " <> ", extraData);
+    }
+
+    @Override
+    public Object visit(PropertyIsGreaterThan filter, Object extraData) {
+        return joinMatchCase(filter, " > ", extraData);
+    }
+
+    @Override
+    public Object visit(PropertyIsGreaterThanOrEqualTo filter, Object extraData) {
+        return joinMatchCase(filter, " >= ", extraData);
+    }
+
+    @Override
+    public Object visit(PropertyIsLessThan filter, Object extraData) {
+        return joinMatchCase(filter, " < ", extraData);
+    }
+
+    @Override
+    public Object visit(PropertyIsLessThanOrEqualTo filter, Object extraData) {
+        return joinMatchCase(filter, " <= ", extraData);
+    }
+
+    @Override
+    public Object visit(PropertyIsLike filter, Object extraData) {
+        // TODO: PostgreSQL extension : ilike
+        ensureMatchCase(filter::isMatchingCase);
+        // TODO: port Geotk
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(PropertyIsNull filter, Object extraData) {
+        return evaluateMandatory(filter.getExpression(), extraData) + " = NULL";
+    }
+
+    @Override
+    public Object visit(PropertyIsNil filter, Object extraData) {
+        return evaluateMandatory(filter.getExpression(), extraData) + " = NULL";
+    }
+
+    @Override
+    public Object visit(BBOX filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Beyond filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Contains filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Crosses filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Disjoint filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(DWithin filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Equals filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Intersects filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Overlaps filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Touches filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Within filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(After filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(AnyInteracts filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Before filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Begins filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(BegunBy filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(During filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(EndedBy filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Ends filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(Meets filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(MetBy filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(OverlappedBy filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(TContains filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(TEquals filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    @Override
+    public Object visit(TOverlaps filter, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 30/09/2019
+    }
+
+    /*
+     * Expression visitor
+     */
+
+    @Override
+    public Object visit(NilExpression expression, Object extraData) {
+        return "NULL";
+    }
+
+    @Override
+    public Object visit(Add expression, Object extraData) {
+        return join(expression, " + ", extraData);
+    }
+
+    @Override
+    public Object visit(Divide expression, Object extraData) {
+        return join(expression, " / ", extraData);
+    }
+
+    @Override
+    public Object visit(Function expression, Object extraData) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 01/10/2019
+    }
+
+    @Override
+    public Object visit(Literal expression, Object extraData) {
+        return valueFormatter.apply(expression);
+    }
+
+    @Override
+    public Object visit(Multiply expression, Object extraData) {
+        return join(expression, " * ", extraData);
+    }
+
+
+    @Override
+    public Object visit(PropertyName expression, Object extraData) {
+        return nameFormatter.apply(expression);
+    }
+
+    @Override
+    public Object visit(Subtract expression, Object extraData) {
+        return join(expression, " - ", extraData);
+    }
+
+    /*
+     * UTILITIES
+     */
+
+    protected static CharSequence format(Literal candidate) {
+        final Object value = candidate == null ? null : candidate.getValue();
+        if (value == null) return "NULL";
+        else if (value instanceof CharSequence) {
+            final String asStr = value.toString();
+            asStr.replace("'", "''");
+            return "'"+asStr+"'";
+        }
+
+        throw new UnsupportedOperationException("Not supported yet: Literal value of type "+value.getClass());
+    }
+
+    /**
+     * Beware! This is a naïve implementation, expecting the given property name to exactly match SQL database names.
+     * In the future, it would be desirable to be able to configure a mapper between feature and SQL names.
+     * @param candidate The property name to parse.
+     * @return The SQL representation of the given name.
+     */
+    protected static CharSequence format(PropertyName candidate) {
+        final GenericName pName = Names.parseGenericName(null, ":", candidate.getPropertyName());
+        return pName.getParsedNames().stream()
+                .map(LocalName::toString)
+                .collect(Collectors.joining("\".\"", "\"", "\""));
+    }
+
+    protected CharSequence join(final BinaryLogicOperator filter, String separator, Object extraData) {
+        final List<Filter> subFilters = filter.getChildren();
+        if (subFilters == null || subFilters.isEmpty()) return "";
+        return subFilters.stream()
+                .map(sub -> sub.accept(this, extraData))
+                .filter(ANSIInterpreter::isNonEmptyText)
+                .map( result -> (CharSequence) result)
+                .collect(Collectors.joining(separator, "(", ")"));
+    }
+
+    protected CharSequence joinMatchCase(BinaryComparisonOperator filter, String operator, Object extraData) {
+        ensureMatchCase(filter);
+        return join(filter, operator, extraData);
+    }
+
+    protected CharSequence join(BinaryComparisonOperator candidate, String operator, Object extraData) {
+        return join(candidate::getExpression1, candidate::getExpression2, operator, extraData);
+    }
+
+    protected CharSequence join(BinaryExpression candidate, String operator, Object extraData) {
+        return join(candidate::getExpression1, candidate::getExpression2, operator, extraData);
+    }
+
+    protected CharSequence join(
+            Supplier<Expression> leftOperand,
+            Supplier<Expression> rightOperand,
+            String operator, Object extraData
+    ) {
+        return "("
+                + evaluateMandatory(leftOperand.get(), extraData)
+                + operator
+                + evaluateMandatory(rightOperand.get(), extraData)
+                + ")";
+    }
+
+    protected CharSequence evaluateMandatory(final Filter candidate, Object extraData) {
+        final Object exp = candidate == null ? null : candidate.accept(this, extraData);
+        if (isNonEmptyText(exp)) return (CharSequence) exp;
+        else throw new IllegalArgumentException("Filter evaluate to an empty text: "+candidate);
+    }
+
+    protected CharSequence evaluateMandatory(final Expression candidate, Object extraData) {
+        final Object exp = candidate == null ? null : candidate.accept(this, extraData);
+        if (isNonEmptyText(exp)) return (CharSequence) exp;
+        else throw new IllegalArgumentException("Expression evaluate to an empty text: "+candidate);
+    }
+
+    protected static boolean isNonEmptyText(final Object toCheck) {
+        return toCheck instanceof CharSequence && ((CharSequence) toCheck).length() > 0;
+    }
+
+    private static void ensureMatchCase(BinaryComparisonOperator filter) {
+        ensureMatchCase(filter::isMatchingCase);
+    }
+    private static void ensureMatchCase(BooleanSupplier filter) {
+        if (!filter.getAsBoolean())
+            throw new UnsupportedOperationException("case insensitive match is not defined by ANSI SQL");
+    }
+
+    protected static CharSequence append(CharSequence toAdd, Object extraData) {
+        if (extraData instanceof StringBuilder) return ((StringBuilder) extraData).append(toAdd);
+        return toAdd;
+    }
+}
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Analyzer.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Analyzer.java
index 9348d01..d947ad2 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Analyzer.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Analyzer.java
@@ -592,7 +592,7 @@ final class Analyzer {
 
         @Override
         public Optional<GenericName> getName() throws SQLException {
-            return Optional.of(name);
+            return Optional.ofNullable(name);
         }
 
         @Override
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/FeatureAdapter.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/FeatureAdapter.java
index 17a345a..e346571 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/FeatureAdapter.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/FeatureAdapter.java
@@ -1,5 +1,6 @@
 package org.apache.sis.internal.sql.feature;
 
+import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
@@ -26,7 +27,7 @@ class FeatureAdapter {
         this.attributeMappers = Collections.unmodifiableList(new ArrayList<>(attributeMappers));
     }
 
-    Feature read(final ResultSet cursor) throws SQLException {
+    Feature read(final ResultSet cursor, final Connection origin) throws SQLException {
         final Feature result = readAttributes(cursor);
         addImports(result, cursor);
         addExports(result);
@@ -47,11 +48,11 @@ class FeatureAdapter {
         return result;
     }
 
-    List<Feature> prefetch(final int size, final ResultSet cursor) throws SQLException {
+    List<Feature> prefetch(final int size, final ResultSet cursor, final Connection origin) throws SQLException {
         // TODO: optimize by resolving import associations by  batch import fetching.
         final ArrayList<Feature> features = new ArrayList<>(size);
         for (int i = 0 ; i < size && cursor.next() ; i++) {
-            features.add(read(cursor));
+            features.add(read(cursor, origin));
         }
 
         return features;
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java
index 2950311..5ce2195 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java
@@ -33,12 +33,12 @@ import java.util.Map;
 import java.util.Spliterator;
 import java.util.function.Consumer;
 import java.util.function.Function;
+import java.util.regex.Pattern;
 import java.util.stream.Stream;
 import java.util.stream.StreamSupport;
 
 import org.opengis.feature.Feature;
 import org.opengis.feature.FeatureType;
-import org.opengis.filter.Filter;
 import org.opengis.filter.sort.SortBy;
 import org.opengis.util.GenericName;
 
@@ -65,6 +65,7 @@ final class Features implements Spliterator<Feature> {
      * An empty array of iterators, used when there is no dependency.
      */
     private static final Features[] EMPTY = new Features[0];
+    public static final Pattern WHERE_REGEX = Pattern.compile("^\\s*WHERE\\s+", Pattern.CASE_INSENSITIVE);
 
     /**
      * The type of features to create.
@@ -152,8 +153,6 @@ final class Features implements Spliterator<Feature> {
 
     /**
      * Creates a new iterator over the feature instances.
-     * TODO: This object is far too complicated. A builder of some sort should be used. We should even consider a
-     * third-party tool like JOOQ, which is a great abstraction for SQL query building.
      *
      * @param table             the table for which we are creating an iterator.
      * @param connection        connection to the database.
@@ -169,7 +168,7 @@ final class Features implements Spliterator<Feature> {
      */
     Features(final Table table, final Connection connection, final Collection<ColumnRef> columns,
              final List<Relation> following, final Relation noFollow,
-             boolean distinct, final long offset, final long limit)
+             boolean distinct, final long offset, final long limit, CharSequence where)
              throws SQLException, InternalDataStoreException
     {
         ensureNonEmpty("Columns to fetch", columns);
@@ -269,6 +268,7 @@ final class Features implements Spliterator<Feature> {
          * a possibility that many rows reference the same feature instance.
          */
         sql.append(" FROM ").appendIdentifier(table.name.catalog, table.name.schema, table.name.table);
+        appendWhere(sql, where);
         if (following.isEmpty()) {
             statement = null;
             instances = null;       // A future SIS version could use the map opportunistically if it exists.
@@ -559,20 +559,25 @@ final class Features implements Spliterator<Feature> {
         }
     }
 
-    static class Builder implements QueryBuilder {
+    static class Builder implements StreamSQL.QueryBuilder {
 
         final Table parent;
         long limit, offset;
         SortBy[] sort;
 
+        ColumnRef[] columns;
+
         boolean distinct;
 
+        CharSequence whereClause;
+
         Builder(Table parent) {
             this.parent = parent;
         }
 
-        Builder where(final Filter filter) {
-            throw new UnsupportedOperationException("TODO");
+        Builder where(final CharSequence whereClause) {
+            this.whereClause = whereClause;
+            return this;
         }
 
         Builder sortBy(final SortBy...sorting) {
@@ -581,27 +586,33 @@ final class Features implements Spliterator<Feature> {
             return this;
         }
 
+        Builder setColumns(final ColumnRef... columns) {
+            if (columns == null || columns.length < 1) this.columns = null;
+            else this.columns = Arrays.copyOf(columns, columns.length);
+            return this;
+        }
+
         @Override
-        public QueryBuilder limit(long limit) {
+        public StreamSQL.QueryBuilder limit(long limit) {
             this.limit = limit;
             return this;
         }
 
         @Override
-        public QueryBuilder offset(long offset) {
+        public StreamSQL.QueryBuilder offset(long offset) {
             this.offset = offset;
             return this;
         }
 
         @Override
-        public QueryBuilder distinct(boolean activate) {
+        public StreamSQL.QueryBuilder distinct(boolean activate) {
             this.distinct = activate;
             return this;
         }
 
         @Override
         public Connector select(ColumnRef... columns) {
-            return new TableConnector(this, columns);
+            return new TableConnector(this, columns == null || columns.length < 1 ? this.columns : columns);
         }
     }
 
@@ -613,18 +624,21 @@ final class Features implements Spliterator<Feature> {
 
         final SortBy[] sort;
 
+        final CharSequence where;
+
         TableConnector(Builder source, ColumnRef[] columns) {
             this.source = source;
             this.distinct = source.distinct;
             this.columns = columns;
             this.sort = source.sort == null ? null : Arrays.copyOf(source.sort, source.sort.length);
+            this.where = source.whereClause;
         }
 
         public Stream<Feature> connect(final Connection conn) throws SQLException, DataStoreException {
             final Features features = new Features(
                     source.parent, conn,
                     columns == null || columns.length < 1 ? source.parent.attributes : Arrays.asList(columns),
-                    new ArrayList<>(), null, distinct, source.offset, source.limit
+                    new ArrayList<>(), null, distinct, source.offset, source.limit, where
             );
             return StreamSupport.stream(features, false);
         }
@@ -652,6 +666,7 @@ final class Features implements Spliterator<Feature> {
             if (count) sql.append(')');
             sql.append(" FROM ").appendIdentifier(source.parent.name.catalog, source.parent.name.schema, source.parent.name.table);
 
+            appendWhere(sql, where);
             if (!count && sort != null && sort.length > 0) {
                 sql.append(" ORDER BY ");
                 append(sql, sort[0]);
@@ -665,6 +680,17 @@ final class Features implements Spliterator<Feature> {
         }
     }
 
+    private static void appendWhere(SQLBuilder sql, CharSequence whereClause) {
+        if (whereClause != null) {
+            final String whereStr = whereClause.toString();
+            if (!whereStr.isEmpty()) {
+                sql.append(" ");
+                if (!WHERE_REGEX.matcher(whereStr).find()) sql.append("WHERE ");
+                sql.append(whereStr);
+            }
+        }
+    }
+
     private static void append(SQLBuilder target, SortBy toAppend) {
         target.appendIdentifier(toAppend.getPropertyName().getPropertyName()).append(" ");
         if (toAppend.getSortOrder() != null) target.append(toAppend.getSortOrder().toSQL());
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/QueryBuilder.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/QueryBuilder.java
deleted file mode 100644
index a78ed2a..0000000
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/QueryBuilder.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package org.apache.sis.internal.sql.feature;
-
-/**
- * API to allow overrided SQL Stream to delegate a set of intermediate operations to native driver.
- * TODO: move as inner interface of {@link StreamSQL} ?
- */
-interface QueryBuilder {
-
-    QueryBuilder limit(long limit);
-
-    QueryBuilder offset(long offset);
-
-    default QueryBuilder distinct() { return distinct(true); }
-
-    QueryBuilder distinct(boolean activate);
-
-    Connector select(ColumnRef... columns);
-}
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/QueryFeatureSet.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/QueryFeatureSet.java
index ffbdc8b..6c16b6a 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/QueryFeatureSet.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/QueryFeatureSet.java
@@ -20,6 +20,7 @@ import org.apache.sis.internal.metadata.sql.SQLBuilder;
 import org.apache.sis.internal.storage.AbstractFeatureSet;
 import org.apache.sis.storage.DataStoreException;
 import org.apache.sis.util.collection.BackingStoreException;
+import org.apache.sis.util.logging.WarningListeners;
 
 /**
  * Stores SQL query given at built time, and execute it when calling {@link #features(boolean) data stream}. Note that
@@ -126,20 +127,20 @@ public class QueryFeatureSet extends AbstractFeatureSet {
      *                 can use {@link #QueryFeatureSet(SQLBuilder, DataSource, Connection) another constructor}.
      */
     QueryFeatureSet(SQLBuilder queryBuilder, Analyzer analyzer, DataSource source, Connection conn) throws SQLException {
-        super(analyzer.listeners);
-        this.source = source;
+        this(queryBuilder, createAdapter(queryBuilder, analyzer, conn), analyzer.listeners, source, conn);
+    }
 
-        String sql = queryBuilder.toString();
-        try (PreparedStatement statement = conn.prepareStatement(sql)) {
-            final SQLTypeSpecification spec = analyzer.create(statement, sql, null);
-            adapter = analyzer.buildAdapter(spec);
-        }
+    QueryFeatureSet(SQLBuilder queryBuilder, FeatureAdapter adapter, WarningListeners listeners, DataSource source, Connection conn) throws SQLException {
+        super(listeners);
+        this.source = source;
+        this.adapter = adapter;
 
         /* We will now try to parse offset and limit from input query. If we encounter unsupported/ambiguous case,
          * we will fallback to pure java management of additional limit and offset.
          * If we successfully retrieve offset and limit, we'll modify user query to take account of additional
          * parameters given later.
          */
+        String sql = queryBuilder.toString();
         long tmpOffset = 0, tmpLimit = 0;
         try {
             Matcher matcher = OFFSET_PATTERN.matcher(sql);
@@ -205,7 +206,7 @@ public class QueryFeatureSet extends AbstractFeatureSet {
         return new StreamSQL(new QueryAdapter(queryBuilder, parallel), source, parallel);
     }
 
-    private final class QueryAdapter implements QueryBuilder {
+    private final class QueryAdapter implements StreamSQL.QueryBuilder {
 
         private final SQLBuilder source;
         private final boolean parallel;
@@ -220,19 +221,19 @@ public class QueryFeatureSet extends AbstractFeatureSet {
         }
 
         @Override
-        public QueryBuilder limit(long limit) {
+        public StreamSQL.QueryBuilder limit(long limit) {
             additionalLimit = limit;
             return this;
         }
 
         @Override
-        public QueryBuilder offset(long offset) {
+        public StreamSQL.QueryBuilder offset(long offset) {
             additionalOffset = offset;
             return this;
         }
 
         @Override
-        public QueryBuilder distinct(boolean activate) {
+        public StreamSQL.QueryBuilder distinct(boolean activate) {
             if (distinct == activate) return this;
             throw new UnsupportedOperationException("Not supported yet: modifying user query"); // "Alexis Manin (Geomatys)" on 24/09/2019
         }
@@ -260,9 +261,21 @@ public class QueryFeatureSet extends AbstractFeatureSet {
         }
     }
 
+    private static FeatureAdapter createAdapter(SQLBuilder queryBuilder, Analyzer analyzer, Connection conn) throws SQLException {
+        String sql = queryBuilder.toString();
+        try (PreparedStatement statement = conn.prepareStatement(sql)) {
+            final SQLTypeSpecification spec = analyzer.create(statement, sql, null);
+            return analyzer.buildAdapter(spec);
+        }
+    }
+
     private final class PreparedQueryConnector implements Connector {
 
         final String sql;
+        /**
+         * In some cases, detection/modification of SQL offset and limit parameters can fail. In such cases, we amend
+         * the result stream with pure Java {@link Stream#skip(long) offset} and {@link Stream#limit(long) limit}.
+         */
         private long additionalOffset, additionalLimit;
         private final boolean parallel;
 
@@ -281,7 +294,7 @@ public class QueryFeatureSet extends AbstractFeatureSet {
             final int fetchSize = result.getFetchSize();
             final boolean withPrefetch = !allowBatchLoading || (fetchSize < 1 || fetchSize >= Integer.MAX_VALUE);
             final Spliterator<Feature> spliterator = withPrefetch ?
-                    new ResultSpliterator(result) : new PrefetchSpliterator(result, fetchRatio);
+                    new ResultSpliterator(result, connection) : new PrefetchSpliterator(result, connection, fetchRatio);
             Stream<Feature> stream = StreamSupport.stream(spliterator, parallel && withPrefetch);
             if (additionalLimit > 0) stream = stream.limit(additionalLimit);
             if (additionalOffset > 0) stream = stream.skip(additionalOffset);
@@ -300,8 +313,12 @@ public class QueryFeatureSet extends AbstractFeatureSet {
 
         @Override
         public String estimateStatement(boolean count) {
-            // Require analysis. Things could get complicated if original user query is already a count.
-            throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 24/09/2019
+            if (count) {
+                // We should check if user query is already a count operation, in which case we would return "select 1"
+                throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 24/09/2019
+            } else {
+                return sql;
+            }
         }
     }
 
@@ -319,9 +336,11 @@ public class QueryFeatureSet extends AbstractFeatureSet {
     private abstract class QuerySpliterator  implements java.util.Spliterator<Feature> {
 
         final ResultSet result;
+        final Connection origin;
 
-        private QuerySpliterator(ResultSet result) {
+        private QuerySpliterator(ResultSet result, Connection origin) {
             this.result = result;
+            this.origin = origin;
         }
 
         @Override
@@ -338,15 +357,15 @@ public class QueryFeatureSet extends AbstractFeatureSet {
 
     private final class ResultSpliterator extends QuerySpliterator {
 
-        private ResultSpliterator(ResultSet result) {
-            super(result);
+        private ResultSpliterator(ResultSet result, Connection origin) {
+            super(result, origin);
         }
 
         @Override
         public boolean tryAdvance(Consumer<? super Feature> action) {
             try {
                 if (result.next()) {
-                    final Feature f = adapter.read(result);
+                    final Feature f = adapter.read(result, origin);
                     action.accept(f);
                     return true;
                 } else return false;
@@ -371,7 +390,7 @@ public class QueryFeatureSet extends AbstractFeatureSet {
      * there's not much improvement regarding to naive streaming approach. IMHO, two improvements would really impact
      * performance positively if done:
      * <ul>
-     *     <li>Optimisation of batch loading through {@link FeatureAdapter#prefetch(int, ResultSet)}</li>
+     *     <li>Optimisation of batch loading through {@link FeatureAdapter#prefetch(int, ResultSet, Connection)}</li>
      *     <li>Better splitting balance, as stated by {@link Spliterator#trySplit()}</li>
      * </ul>
      */
@@ -385,14 +404,14 @@ public class QueryFeatureSet extends AbstractFeatureSet {
          * According to {@link Spliterator#trySplit()} documentation, the original size estimation must be reduced after
          * split to remain consistent.
          */
-        long splittedAmount = 0;
+        long splittedAmount;
 
-        private PrefetchSpliterator(ResultSet result) throws SQLException {
-            this(result, 0.5f);
+        private PrefetchSpliterator(ResultSet result, Connection origin) throws SQLException {
+            this(result, origin, 0.5f);
         }
 
-        private PrefetchSpliterator(ResultSet result, float fetchRatio) throws SQLException {
-            super(result);
+        private PrefetchSpliterator(ResultSet result, Connection origin, float fetchRatio) throws SQLException {
+            super(result, origin);
             this.fetchSize = Math.max((int) (result.getFetchSize()*fetchRatio), 1);
         }
 
@@ -430,7 +449,7 @@ public class QueryFeatureSet extends AbstractFeatureSet {
             if (chunk == null || idx >= chunk.size()) {
                 idx = 0;
                 try {
-                    chunk = adapter.prefetch(fetchSize, result);
+                    chunk = adapter.prefetch(fetchSize, result, origin);
                 } catch (SQLException e) {
                     throw new BackingStoreException(e);
                 }
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/SQLQueryAdapter.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/SQLQueryAdapter.java
index c6b3dc4..e471567 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/SQLQueryAdapter.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/SQLQueryAdapter.java
@@ -15,42 +15,77 @@ public class SQLQueryAdapter implements SubsetAdapter.AdapterBuilder {
 
     final Table parent;
 
-    private SimpleQuery.Column[] columns;
+    private ColumnRef[] columns;
     private SortBy[] sorting;
 
+    private CharSequence where;
+
     public SQLQueryAdapter(Table parent) {
         this.parent = parent;
     }
 
+    /**
+     * No-op implementation. SQL optimisation is dynamically applied through {@link StreamSQL}.
+     * @param offset The offset to handle.
+     * @return Input offset.
+     */
     @Override
     public long offset(long offset) {
-        return offset; // Done by stream overload
+        return offset;
     }
 
+    /**
+     * No-op implementation. SQL optimisation is dynamically applied through {@link StreamSQL}.
+     * @param limit The limit to handle.
+     * @return Input limit.
+     */
     @Override
     public long limit(long limit) {
-        return limit; // Done by stream overload
+        return limit;
     }
 
     @Override
     public Filter filter(Filter filter) {
-        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 18/09/2019
+        try {
+            final Object result = filter.accept(new ANSIInterpreter(), null);
+            if (ANSIInterpreter.isNonEmptyText(result)) {
+                where = (CharSequence) result;
+                return Filter.INCLUDE;
+            }
+        } catch (UnsupportedOperationException e) {
+            // TODO: log
+            where = null;
+        }
+
+        return filter;
     }
 
     @Override
     public boolean sort(SortBy[] comparison) {
         sorting = Arrays.copyOf(comparison, comparison.length);
-        return true;
+        return false;
     }
 
     @Override
-    public SimpleQuery.Column[] select(List<SimpleQuery.Column> columns) {
-        this.columns = columns.toArray(new SimpleQuery.Column[columns.size()]);
-        return null;
+    public boolean select(List<SimpleQuery.Column> columns) {
+        /* We have a lot of trouble with the current column API. It defines an expression and an alias, which allows
+         * inferring the output property type. However, it's very difficult with current methods to infer source columns used
+         * for building output. Note that we could check if column expression is a property name or a literal, but if
+         * any column is not one of those two, it means it uses unknown (for us) SQL columns, so we cannot filter
+         * selected columns safely.
+         */
+        return false;
     }
 
     @Override
     public Optional<FeatureSet> build() {
-        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 18/09/2019
+        if (isNoOp()) return Optional.empty();
+        return Optional.of(new TableSubset(parent, sorting, where));
+    }
+
+    private boolean isNoOp() {
+        return (sorting == null || sorting.length < 1)
+                && (columns == null || columns.length < 1)
+                && (where == null || where.length() < 1);
     }
 }
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/SQLQueryBuilder.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/SQLQueryBuilder.java
deleted file mode 100644
index 55337d7..0000000
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/SQLQueryBuilder.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package org.apache.sis.internal.sql.feature;
-
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.SQLException;
-import javax.sql.DataSource;
-
-import org.apache.sis.internal.metadata.sql.SQLBuilder;
-import org.apache.sis.storage.DataStoreException;
-import org.apache.sis.storage.FeatureSet;
-
-public class SQLQueryBuilder extends SQLBuilder {
-
-    final DataSource source;
-
-    public SQLQueryBuilder(DataSource source, final DatabaseMetaData metadata, final boolean quoteSchema) throws SQLException {
-        super(metadata, quoteSchema);
-        this.source = source;
-    }
-
-    public FeatureSet build(final Connection connection) throws SQLException, DataStoreException {
-        final Analyzer analyzer = new Analyzer(source, connection.getMetaData(), null, null);
-        // TODO: defensive copy of this builder.
-        return new QueryFeatureSet(this, analyzer, source, connection);
-    }
-
-
-}
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/StreamSQL.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/StreamSQL.java
index a0e9888..30820a9 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/StreamSQL.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/StreamSQL.java
@@ -407,4 +407,20 @@ class StreamSQL extends StreamDecoration<Feature> {
             }
         }
     }
+
+    /**
+     * API allowing an overriding SQL Stream to delegate a set of intermediate operations to the native driver.
+     */
+    interface QueryBuilder {
+
+        QueryBuilder limit(long limit);
+
+        QueryBuilder offset(long offset);
+
+        default QueryBuilder distinct() { return distinct(true); }
+
+        QueryBuilder distinct(boolean activate);
+
+        Connector select(ColumnRef... columns);
+    }
 }
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java
index 81b8c75..b60ec46 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java
@@ -29,6 +29,7 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import javax.sql.DataSource;
 
+import org.opengis.feature.Attribute;
 import org.opengis.feature.AttributeType;
 import org.opengis.feature.Feature;
 import org.opengis.feature.FeatureAssociationRole;
@@ -39,6 +40,7 @@ import org.opengis.util.GenericName;
 import org.apache.sis.internal.metadata.sql.Reflection;
 import org.apache.sis.internal.metadata.sql.SQLBuilder;
 import org.apache.sis.internal.storage.AbstractFeatureSet;
+import org.apache.sis.internal.storage.SubsetAdapter;
 import org.apache.sis.internal.storage.query.SimpleQuery;
 import org.apache.sis.storage.DataStoreException;
 import org.apache.sis.storage.FeatureSet;
@@ -85,7 +87,8 @@ final class Table extends AbstractFeatureSet {
     /**
      * Name of all columns to fetch from database, optionally amended with an alias. Alias is used for feature type
      * attributes which have been renamed to avoid name collisions. In any case, a call to {@link ColumnRef#getAttributeName()}}
-     * will return the name available in target feature type.
+     * will return the name available in the target feature type. This list contains only {@link Attribute} names, no
+     * relation names.
      */
     final List<ColumnRef> attributes;
 
@@ -204,22 +207,12 @@ final class Table extends AbstractFeatureSet {
 
     @Override
     public FeatureSet subset(Query query) throws UnsupportedQueryException, DataStoreException {
-        if (!(query instanceof SimpleQuery)) return super.subset(query);
-        boolean remainingQuery = true;
-        final SimpleQuery q = (SimpleQuery) query;
-        FeatureSet subset = this;
-        final List<SimpleQuery.Column> cols = q.getColumns();
-
-        /**
-         * Once filter has been taken care of, we will be able to check columns to filter. Note that all filters
-         * managed by database engine can use non-returned columns, but it is not the case of remaining ones, which
-         * are applied after feature creation, therefore with only filtered columns accessible.
-         */
-        if (cols != null && !cols.isEmpty()) {
-
+        if (query instanceof SimpleQuery) {
+            final SubsetAdapter subsetAdapter = new SubsetAdapter(fs -> new SQLQueryAdapter(this));
+            return subsetAdapter.subset(this, (SimpleQuery) query);
         }
 
-        return remainingQuery ? subset.subset(q) : subset;
+        return super.subset(query);
     }
 
     /**
@@ -431,6 +424,6 @@ final class Table extends AbstractFeatureSet {
     final Features features(final Connection connection, final List<Relation> following, final Relation noFollow)
             throws SQLException, InternalDataStoreException
     {
-        return new Features(this, connection, attributes, following, noFollow, false, -1, -1);
+        return new Features(this, connection, attributes, following, noFollow, false, -1, -1, null);
     }
 }
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/TableSubset.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/TableSubset.java
new file mode 100644
index 0000000..dc9e427
--- /dev/null
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/TableSubset.java
@@ -0,0 +1,67 @@
+package org.apache.sis.internal.sql.feature;
+
+import java.util.Optional;
+import java.util.stream.Stream;
+
+import org.opengis.feature.Feature;
+import org.opengis.feature.FeatureType;
+import org.opengis.filter.sort.SortBy;
+import org.opengis.geometry.Envelope;
+import org.opengis.metadata.Metadata;
+import org.opengis.util.GenericName;
+
+import org.apache.sis.storage.DataStoreException;
+import org.apache.sis.storage.FeatureSet;
+import org.apache.sis.storage.event.ChangeEvent;
+import org.apache.sis.storage.event.ChangeListener;
+
+public class TableSubset implements FeatureSet {
+
+    final Table parent;
+    final SortBy[] sorting;
+    final CharSequence where;
+
+    public TableSubset(Table parent, SortBy[] sorting, CharSequence where) {
+        this.parent = parent;
+        this.sorting = sorting;
+        this.where = where;
+    }
+
+    @Override
+    public FeatureType getType() throws DataStoreException {
+        return parent.getType();
+    }
+
+    @Override
+    public Stream<Feature> features(boolean parallel) throws DataStoreException {
+        final Features.Builder builder = new Features.Builder(parent)
+                .where(where)
+                .sortBy(sorting);
+        return new StreamSQL(builder, parent.source, parallel);
+    }
+
+    @Override
+    public Optional<Envelope> getEnvelope() throws DataStoreException {
+        return parent.getEnvelope();
+    }
+
+    @Override
+    public Optional<GenericName> getIdentifier() throws DataStoreException {
+        return Optional.empty();
+    }
+
+    @Override
+    public Metadata getMetadata() throws DataStoreException {
+        return parent.getMetadata();
+    }
+
+    @Override
+    public <T extends ChangeEvent> void addListener(ChangeListener<? super T> listener, Class<T> eventType) {
+        parent.addListener(listener, eventType);
+    }
+
+    @Override
+    public <T extends ChangeEvent> void removeListener(ChangeListener<? super T> listener, Class<T> eventType) {
+        parent.removeListener(listener, eventType);
+    }
+}
diff --git a/storage/sis-sqlstore/src/test/java/org/apache/sis/storage/sql/SQLStoreTest.java b/storage/sis-sqlstore/src/test/java/org/apache/sis/storage/sql/SQLStoreTest.java
index 2929194..5142fa1 100644
--- a/storage/sis-sqlstore/src/test/java/org/apache/sis/storage/sql/SQLStoreTest.java
+++ b/storage/sis-sqlstore/src/test/java/org/apache/sis/storage/sql/SQLStoreTest.java
@@ -36,17 +36,21 @@ import org.opengis.feature.Feature;
 import org.opengis.feature.FeatureAssociationRole;
 import org.opengis.feature.FeatureType;
 import org.opengis.feature.PropertyType;
+import org.opengis.filter.sort.SortOrder;
 import org.opengis.util.GenericName;
 
+import org.apache.sis.filter.DefaultFilterFactory;
 import org.apache.sis.internal.feature.AttributeConvention;
 import org.apache.sis.internal.metadata.sql.SQLBuilder;
 import org.apache.sis.internal.sql.feature.QueryFeatureSet;
+import org.apache.sis.internal.storage.query.SimpleQuery;
 import org.apache.sis.storage.DataStoreException;
 import org.apache.sis.storage.FeatureSet;
 import org.apache.sis.storage.StorageConnector;
 import org.apache.sis.test.TestCase;
 import org.apache.sis.test.sql.TestDatabase;
 
+import org.junit.Assert;
 import org.junit.Test;
 
 import static org.apache.sis.test.Assert.assertEquals;
@@ -83,6 +87,8 @@ public final strictfp class SQLStoreTest extends TestCase {
             531902     // Québec,   2016.
     };
 
+    private static final DefaultFilterFactory FF = new DefaultFilterFactory();
+
     /**
      * Number of time that the each country has been seen while iterating over the cities.
      */
@@ -165,7 +171,9 @@ public final strictfp class SQLStoreTest extends TestCase {
                 // Now, we'll check that overloaded stream operations are functionally stable, even stacked.
                 verifyStreamOperations(cities);
 
-                verifyQueries(tmp.source);
+                verifySimpleQueries(store);
+
+                verifySQLQueries(tmp.source);
             }
         }
         assertEquals(Integer.valueOf(2), countryCount.remove("CAN"));
@@ -174,7 +182,55 @@ public final strictfp class SQLStoreTest extends TestCase {
         assertTrue  (countryCount.isEmpty());
     }
 
-    private void verifyQueries(DataSource source) throws Exception {
+    private void verifySimpleQueries(SQLStore dataset) throws Exception {
+        verifySimpleQuerySorting(dataset);
+        verifySimpleWhere(dataset);
+    }
+
+    private void verifySimpleQuerySorting(SQLStore dataset) throws DataStoreException {
+        final FeatureSet parks = (FeatureSet) dataset.findResource("Parks");
+        final SimpleQuery query = new SimpleQuery();
+        query.setColumns(new SimpleQuery.Column(FF.property("english_name")));
+        query.setSortBy(
+                FF.sort("country", SortOrder.DESCENDING),
+                FF.sort("english_name", SortOrder.ASCENDING)
+        );
+        final FeatureSet subset = parks.subset(query);
+        String[] expectedPNames = {"english_name"};
+        try (Stream<Feature> features = subset.features(false)) {
+            final Object[] values = features.map(f -> {
+                final String[] names = f.getType().getProperties(true).stream()
+                        .map(PropertyType::getName)
+                        .map(GenericName::toString)
+                        .toArray(size -> new String[size]);
+                assertArrayEquals(expectedPNames, names);
+                return f.getPropertyValue(expectedPNames[0]);
+            })
+                    .toArray();
+            String[] expectedValues = {"Shinjuku Gyoen", "Yoyogi-kōen", "Luxembourg Garden", "Tuileries Garden", "Mount Royal"};
+            assertArrayEquals("Read values are not sorted as expected.", expectedValues, values);
+        }
+    }
+
+    private void verifySimpleWhere(SQLStore dataset) throws Exception {
+        final SimpleQuery q = new SimpleQuery();
+        q.setSortBy(FF.sort("native_name", SortOrder.ASCENDING));
+        q.setFilter(FF.equals(FF.property("country"), FF.literal("CAN")));
+        final FeatureSet cities = (FeatureSet) dataset.findResource("Cities");
+        final Object[] names;
+        try (Stream<Feature> features = cities.subset(q).features(false)) {
+            names = features.map(f -> f.getPropertyValue("native_name"))
+                    .toArray();
+        }
+
+        Assert.assertArrayEquals(
+                "Filtered cities should only contains Canadian ones",
+                new String[] {"Montréal", "Québec"},
+                names
+        );
+    }
+
+    private void verifySQLQueries(DataSource source) throws Exception {
         verifyFetchCityTableAsQuery(source);
         verifyLimitOffsetAndColumnSelectionFromQuery(source);
         verifyDistinctQuery(source);
@@ -185,7 +241,7 @@ public final strictfp class SQLStoreTest extends TestCase {
         final QueryFeatureSet canadaCities;
         try (Connection conn = source.getConnection()) {
             final SQLBuilder builder = new SQLBuilder(conn.getMetaData(), false)
-                    .append("SELECT * FROM ").appendIdentifier("features", "Cities");
+                    .append("SELECT * FROM ").appendIdentifier(SCHEMA, "Cities");
             allCities = new QueryFeatureSet(builder, source, conn);
             /* By re-using the same builder, we ensure a defensive copy is done at feature set creation, avoiding
             * potential concurrency or security issues caused by subsequent modification of the query.
@@ -207,17 +263,22 @@ public final strictfp class SQLStoreTest extends TestCase {
         expectedResults.add(city("CAN", "Montréal", "Montreal", 1704694));
         expectedResults.add(city("CAN", "Québec", "Quebec", 531902));
 
-        Set<Map<String, Object>> result = canadaCities.features(false)
-                .map(SQLStoreTest::asMap)
-                .collect(Collectors.toSet());
+        Set<Map<String, Object>> result;
+        try (Stream<Feature> features = canadaCities.features(false)) {
+            result = features
+                    .map(SQLStoreTest::asMap)
+                    .collect(Collectors.toSet());
+        }
         assertEquals("Query result is not consistent with expected one", expectedResults, result);
 
         expectedResults.add(city("FRA", "Paris",    "Paris",    2206488));
         expectedResults.add(city("JPN", "東京",     "Tōkyō",   13622267));
 
-        result = allCities.features(false)
-                .map(SQLStoreTest::asMap)
-                .collect(Collectors.toSet());
+        try (Stream<Feature> features = allCities.features(false)) {
+            result = features
+                    .map(SQLStoreTest::asMap)
+                    .collect(Collectors.toSet());
+        }
         assertEquals("Query result is not consistent with expected one", expectedResults, result);
     }
 
@@ -266,7 +327,7 @@ public final strictfp class SQLStoreTest extends TestCase {
     private void verifyLimitOffsetAndColumnSelectionFromQuery(final DataSource source) throws Exception {
         // Ensure multiline text is accepted
         final String query = "SELECT \"english_name\" as \"title\" \n\r" +
-                "FROM features.\"Parks\" \n" +
+                "FROM "+SCHEMA+".\"Parks\" \n" +
                 "ORDER BY \"english_name\" ASC \n" +
                 "OFFSET 2 ROWS FETCH NEXT 3 ROWS ONLY";
         final QueryFeatureSet qfs;
@@ -286,9 +347,13 @@ public final strictfp class SQLStoreTest extends TestCase {
         assertEquals("Column length constraint should be visible from attribute type.", 20, ((AttributeType)precision).getDefaultValue());
         assertFalse("Built feature type should have exactly one attribute.", props.hasNext());
 
-        Function<Stream<Feature>, String[]> getNames = in -> in
-                .map(f -> f.getPropertyValue("title").toString())
-                .toArray(size -> new String[size]);
+        Function<Stream<Feature>, String[]> getNames = in -> {
+            try (Stream<Feature> closeable = in) {
+                return in
+                        .map(f -> f.getPropertyValue("title").toString())
+                        .toArray(size -> new String[size]);
+            }
+        };
 
         String[] parkNames = getNames.apply(
                 qfs.features(false)
@@ -319,16 +384,19 @@ public final strictfp class SQLStoreTest extends TestCase {
      */
     private void verifyDistinctQuery(DataSource source) throws SQLException {
         // Ensure multiline text is accepted
-        final String query = "SELECT \"country\" FROM features.\"Parks\" ORDER BY \"country\"";
+        final String query = "SELECT \"country\" FROM "+SCHEMA+".\"Parks\" ORDER BY \"country\"";
         final QueryFeatureSet qfs;
         try (Connection conn = source.getConnection()) {
             qfs = new QueryFeatureSet(query, source, conn);
         }
 
-        final Object[] expected = qfs.features(false)
-                .distinct() 
-                .map(f -> f.getPropertyValue("country"))
-                .toArray();
+        final Object[] expected;
+        try (Stream<Feature> features = qfs.features(false)) {
+            expected = features
+                    .distinct()
+                    .map(f -> f.getPropertyValue("country"))
+                    .toArray();
+        }
 
         assertArrayEquals("Distinct country names, sorted in ascending order", new String[]{"CAN", "FRA", "JPN"}, expected);
     }
diff --git a/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/SubsetAdapter.java b/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/SubsetAdapter.java
index 08e4310..737d6d6 100644
--- a/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/SubsetAdapter.java
+++ b/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/SubsetAdapter.java
@@ -33,14 +33,20 @@ public final class SubsetAdapter {
         final long limit = query.getLimit();
         if (limit != UNLIMITED) remaining.setLimit(driver.limit(limit));
 
-        if (filteringRequired(query)) remaining.setFilter(driver.filter(query.getFilter()));
+        if (filteringRequired(query)) {
+            final Filter baseFilter = query.getFilter();
+            try {
+                final Filter remainingFilter = driver.filter(baseFilter);
+                remaining.setFilter(remainingFilter);
+            } catch (UnsupportedOperationException e) {
+                remaining.setFilter(baseFilter);
+            }
+        }
 
         if (sortRequired(query) && !driver.sort(query.getSortBy())) remaining.setSortBy(query.getSortBy());
 
-        if (!allColumnsIncluded(query)) {
-            final SimpleQuery.Column[] remainingCols = driver.select(query.getColumns());
-            if (remainingCols != null && remainingCols.length > 0)
-                remaining.setColumns(remainingCols);
+        if (!allColumnsIncluded(query) && !driver.select(query.getColumns())) {
+            remaining.setColumns(query.getColumns().toArray(new SimpleQuery.Column[0]));
         }
 
         final FeatureSet driverSubset = driver.build().orElse(source);
@@ -103,6 +109,17 @@ public final class SubsetAdapter {
          */
         long limit(long limit);
 
+        /**
+         *
+         * @param filter User entity selection criteria.
+         * @return According to driver capabilities, one of the following results:
+         * <ul>
+         *     <li>if no optimisation is applicable, the filter received as input, unchanged.</li>
+         *     <li>if all expressed operators can be handled internally, {@link Filter#INCLUDE}.</li>
+         *     <li>if only a part of the input filter is manageable, a new filter complementing the driver's
+         *     internal filtering, so that the combined result matches the source filter.</li>
+         * </ul>
+         */
         Filter filter(final Filter filter);
 
         /**
@@ -116,10 +133,12 @@ public final class SubsetAdapter {
 
         /**
          * Specify a subset of columns to return to the driver.
-         * @param columns The columns
-         * @return
+         * @param columns The columns to fetch in the result set. Neither null nor an empty list is accepted.
+         * @return True if the underlying driver can entirely manage column selection. False otherwise, meaning
+         * that column selection will not be done, or only partially, and a fallback filter must be applied over
+         * the driver feature set to ensure proper selection.
          */
-        SimpleQuery.Column[] select(List<SimpleQuery.Column> columns);
+        boolean select(List<SimpleQuery.Column> columns);
 
         /**
          * Take a snapshot of all parameters given to query adaptation.
diff --git a/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/query/SimpleQuery.java b/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/query/SimpleQuery.java
index 271e62c..598a338 100644
--- a/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/query/SimpleQuery.java
+++ b/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/query/SimpleQuery.java
@@ -119,11 +119,15 @@ public class SimpleQuery extends Query {
      */
     @SuppressWarnings("AssignmentToCollectionOrArrayFieldFromParameter")
     public void setColumns(Column... columns) {
-        columns = columns.clone();
-        for (int i=0; i<columns.length; i++) {
-            ArgumentChecks.ensureNonNullElement("columns", i, columns[i]);
+        if (columns == null || columns.length < 1) {
+            this.columns = null;
+        } else {
+            columns = columns.clone();
+            for (int i = 0; i < columns.length; i++) {
+                ArgumentChecks.ensureNonNullElement("columns", i, columns[i]);
+            }
+            this.columns = columns;
         }
-        this.columns = columns;
     }
 
     /**


Mime
View raw message