sis-commits mailing list archives

From ama...@apache.org
Subject [sis] 10/45: WIP(SQL-Store): start adding abstractions to allow custom SQL queries
Date Tue, 12 Nov 2019 16:44:37 GMT
This is an automated email from the ASF dual-hosted git repository.

amanin pushed a commit to branch refactor/sql-store
in repository https://gitbox.apache.org/repos/asf/sis.git

commit e8dbbb428ad7cd28474055f0ee9cfac39c51b3b0
Author: Alexis Manin <amanin@apache.org>
AuthorDate: Thu Sep 19 18:03:46 2019 +0200

    WIP(SQL-Store): start adding abstractions to allow custom SQL queries
---
 .../apache/sis/internal/sql/feature/Analyzer.java  | 147 +++++++++++++++++++--
 .../apache/sis/internal/sql/feature/Features.java  |  83 +++++++++---
 .../sis/internal/sql/feature/QueryFeatureSet.java  |  82 ++++++++++++
 .../sis/internal/sql/feature/SQLQueryAdapter.java  |  56 ++++++++
 .../apache/sis/internal/sql/feature/StreamSQL.java |  30 +----
 .../org/apache/sis/internal/sql/feature/Table.java |  35 +++++
 .../apache/sis/internal/storage/SubsetAdapter.java | 131 ++++++++++++++++++
 .../sis/internal/storage/query/SimpleQuery.java    |  20 +--
 8 files changed, 525 insertions(+), 59 deletions(-)

diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Analyzer.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Analyzer.java
index a24d969..b221086 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Analyzer.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Analyzer.java
@@ -16,31 +16,38 @@
  */
 package org.apache.sis.internal.sql.feature;
 
-import java.util.Set;
-import java.util.Map;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.LinkedHashSet;
-import java.util.Collection;
-import java.util.Collections;
 import java.util.Locale;
+import java.util.Map;
 import java.util.Objects;
+import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.LogRecord;
 import javax.sql.DataSource;
-import java.sql.SQLException;
-import java.sql.DatabaseMetaData;
-import java.sql.ResultSet;
-import org.opengis.util.NameSpace;
-import org.opengis.util.NameFactory;
+
+import org.opengis.feature.FeatureType;
 import org.opengis.util.GenericName;
+import org.opengis.util.NameFactory;
+import org.opengis.util.NameSpace;
+
+import org.apache.sis.feature.builder.FeatureTypeBuilder;
 import org.apache.sis.internal.metadata.sql.Dialect;
+import org.apache.sis.internal.metadata.sql.Reflection;
 import org.apache.sis.internal.metadata.sql.SQLUtilities;
 import org.apache.sis.internal.system.DefaultFactories;
-import org.apache.sis.storage.sql.SQLStore;
 import org.apache.sis.storage.DataStore;
 import org.apache.sis.storage.DataStoreException;
 import org.apache.sis.storage.InternalDataStoreException;
+import org.apache.sis.storage.sql.SQLStore;
 import org.apache.sis.util.logging.WarningListeners;
 import org.apache.sis.util.resources.ResourceInternationalString;
 
@@ -312,4 +319,124 @@ final class Analyzer {
         }
         return tables.values();
     }
+
+    public FeatureType buildFeatureType(final ResultSetMetaData target) {
+        throw new UnsupportedOperationException("");
+    }
+
+    private FeatureType build(final SQLTypeSpecification spec) throws SQLException {
+        final FeatureTypeBuilder builder = new FeatureTypeBuilder(nameFactory, functions.library, locale);
+        builder.setName(spec.getName());
+        builder.setDefinition(spec.getDefinition());
+        while  (spec.hasNext()) {
+            final SQLColumnSpecification col = spec.next();
+            functions.toJavaType(col.getType(), col.getName());
+
+        }
+
+        throw new UnsupportedOperationException();
+    }
+
+    private interface SQLTypeSpecification extends Iterator<SQLColumnSpecification> {
+        GenericName getName() throws SQLException;
+        String getDefinition() throws SQLException;
+    }
+
+    private interface SQLColumnSpecification {
+        int getType() throws SQLException;
+        String getName() throws SQLException;
+    }
+
+    private class TableMetadata implements SQLTypeSpecification, AutoCloseable {
+
+        final TableReference id;
+        final ResultSet reflect;
+        private final String tableEsc;
+        private final String schemaEsc;
+
+        private TableMetadata(TableReference source) throws SQLException {
+            this.id = source;
+            tableEsc = escape(source.table);
+            schemaEsc = escape(source.schema);
+            reflect = metadata.getColumns(source.catalog, schemaEsc, tableEsc, null);
+        }
+
+        @Override
+        public GenericName getName() throws SQLException {
+            return id.getName(Analyzer.this);
+        }
+
+        @Override
+        public String getDefinition() throws SQLException {
+            String remarks = id.freeText;
+            if (id instanceof Relation) {
+                try (ResultSet reflect = metadata.getTables(id.catalog, schemaEsc, tableEsc, null)) {
+                    while (reflect.next()) {
+                        remarks = getUniqueString(reflect, Reflection.REMARKS);
+                        if (remarks != null) {
+                            remarks = remarks.trim();
+                            if (remarks.isEmpty()) {
+                                remarks = null;
+                            } else break;
+                        }
+                    }
+                }
+            }
+            return remarks;
+        }
+
+        @Override
+        public boolean hasNext() {
+            throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 19/09/2019
+        }
+
+        @Override
+        public SQLColumnSpecification next() {
+            throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 19/09/2019
+        }
+
+        @Override
+        public void close() throws SQLException {
+            reflect.close();
+        }
+    }
+
+    private class TableColumn implements SQLColumnSpecification {
+
+        final ResultSet reflect;
+
+        private TableColumn(ResultSet reflect) {
+            this.reflect = reflect;
+        }
+
+        @Override
+        public int getType() throws SQLException {
+            return reflect.getInt(Reflection.DATA_TYPE);
+        }
+
+        @Override
+        public String getName() throws SQLException {
+            return reflect.getString(Reflection.TYPE_NAME);
+        }
+    }
+
+    private class QueryColumn implements SQLColumnSpecification {
+        final int idx;
+        final ResultSetMetaData source;
+
+        private QueryColumn(int idx, ResultSetMetaData source) {
+            this.idx = idx;
+            this.source = source;
+        }
+
+        @Override
+        public int getType() throws SQLException {
+            return source.getColumnType(idx);
+        }
+
+        @Override
+        public String getName() throws SQLException {
+            return source.getColumnName(idx);
+        }
+    }
 }
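
The hasNext()/next() stubs above are still unimplemented. A common way to drive such an iteration is a look-ahead cursor over the ResultSet returned by DatabaseMetaData.getColumns(...). The following is only an illustrative, self-contained sketch of that pattern; the class and member names are hypothetical and not part of this commit:

import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.NoSuchElementException;

/** Illustrative look-ahead cursor over the columns of one table (hypothetical, not part of this commit). */
final class ColumnCursor implements Iterator<String>, AutoCloseable {
    private final ResultSet columns;   // as returned by DatabaseMetaData.getColumns(...)
    private Boolean advanced;          // cached result of ResultSet.next(), or null if not probed yet

    ColumnCursor(final DatabaseMetaData metadata, final String schema, final String table) throws SQLException {
        columns = metadata.getColumns(null, schema, table, null);
    }

    @Override
    public boolean hasNext() {
        if (advanced == null) {
            try {
                advanced = columns.next();              // move the cursor once and remember the answer
            } catch (SQLException e) {
                throw new IllegalStateException(e);
            }
        }
        return advanced;
    }

    @Override
    public String next() {
        if (!hasNext()) throw new NoSuchElementException();
        advanced = null;                                // force a fresh probe on the next hasNext() call
        try {
            return columns.getString("COLUMN_NAME");    // standard column of DatabaseMetaData.getColumns
        } catch (SQLException e) {
            throw new IllegalStateException(e);
        }
    }

    @Override
    public void close() throws SQLException {
        columns.close();
    }
}
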
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java
index a596e7b..c343e70 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java
@@ -24,6 +24,7 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -35,6 +36,8 @@ import java.util.function.Function;
 
 import org.opengis.feature.Feature;
 import org.opengis.feature.FeatureType;
+import org.opengis.filter.Filter;
+import org.opengis.filter.sort.SortBy;
 
 import org.apache.sis.internal.metadata.sql.SQLBuilder;
 import org.apache.sis.storage.DataStoreException;
@@ -46,9 +49,6 @@ import org.apache.sis.util.collection.WeakValueHashMap;
 import static org.apache.sis.util.ArgumentChecks.ensureNonEmpty;
 import static org.apache.sis.util.ArgumentChecks.ensureNonNull;
 
-// Branch-dependent imports
-
-
 /**
  * Iterator over feature instances.
  *
@@ -159,7 +159,7 @@ final class Features implements Spliterator<Feature> {
      * @param following         the relations that we are following. Used for avoiding never ending loop.
      * @param noFollow          relation to not follow, or {@code null} if none.
      * @param distinct          True if we should return only distinct result, false otherwise.
-     * @param offset            An offset (nuber of rows to skip) in underlying SQL query. A negative or zeero value
+     * @param offset            An offset (number of rows to skip) in underlying SQL query. A negative or zero value
      *                          means no offset will be set.
      * @param limit             Maximum number of rows to return. Corresponds to a LIMIT statement in underlying SQL
      *                          query. A negative or 0 value means no limit will be set.
@@ -183,7 +183,7 @@ final class Features implements Spliterator<Feature> {
         /*
          * Create a SELECT clause with all columns that are ordinary attributes. Order matter, since 'Features'
          * iterator will map the columns to the attributes listed in the 'attributeNames' array in that order.
-         * Moreover, we optionaly add a "distinct" clause on user request.
+         * Moreover, we optionally add a "distinct" clause on user request.
          */
         final SQLBuilder sql = new SQLBuilder(metadata, true).append("SELECT");
         if (distinct) sql.append(" DISTINCT");
@@ -566,41 +566,92 @@ final class Features implements Spliterator<Feature> {
 
         final Table parent;
         long limit, offset;
-        boolean distinct;
+        SortBy[] sort;
 
         Builder(Table parent) {
             this.parent = parent;
         }
 
+        Connector distinct(ColumnRef... columns) {
+            return select(true, columns);
+        }
+
+        Connector select(boolean distinct, ColumnRef... columns) {
+            return new Connector(this, distinct, columns);
+        }
+
+        Builder where(final Filter filter) {
+            throw new UnsupportedOperationException("TODO");
+        }
+
+        Builder sortBy(final SortBy...sorting) {
+            if (sorting == null || sorting.length < 1) this.sort = null;
+            else this.sort = Arrays.copyOf(sorting, sorting.length);
+            return this;
+        }
+    }
+
+    public static final class Connector {
+        final Builder source;
+
+        final boolean distinct;
+        final ColumnRef[] columns;
+
+        final SortBy[] sort;
+
+        public Connector(Builder source, boolean distinct, ColumnRef[] columns) {
+            this.source = source;
+            this.distinct = distinct;
+            this.columns = columns;
+            this.sort = Arrays.copyOf(source.sort, source.sort.length);
+        }
+
+        public Features connect(final Connection conn) throws SQLException, DataStoreException {
+            return new Features(
+                    source.parent, conn,
+                    columns == null || columns.length < 1? source.parent.attributes : Arrays.asList(columns),
+                    new ArrayList<>(), null, distinct, source.offset, source.limit
+            );
+        }
+
         /**
          * Warning : This does not work with relations. It is only a rough estimation of the parameterized query.
          * @param count True if a count query must be generated. False for a simple selection.
          * @return A text representing (roughly) the SQL query which will be posted.
          * @throws SQLException If we cannot initialize an sql statement builder.
          */
-        String getSnapshot(final boolean count) throws SQLException {
-            final SQLBuilder sql = new SQLBuilder(parent.dbMeta, true).append("SELECT ");
+        String getSnapshot(final boolean count) {
+            final SQLBuilder sql = source.parent.createStatement().append("SELECT ");
             if (count) sql.append("COUNT(");
             if (distinct) sql.append("DISTINCT ");
             // If we want a count and no distinct clause is specified, we can query it for a single column.
-            if (count && !distinct) sql.appendIdentifier(parent.attributes.get(0).name);
+            if (count && !distinct) sql.appendIdentifier(source.parent.attributes.get(0).name);
             else {
-                final Iterator<ColumnRef> it = parent.attributes.iterator();
+                final Iterator<ColumnRef> it = source.parent.attributes.iterator();
                 sql.appendIdentifier(it.next().name);
                 while (it.hasNext()) {
-                    sql.append(',').appendIdentifier(it.next().name);
+                    sql.append(", ").appendIdentifier(it.next().name);
                 }
             }
 
             if (count) sql.append(')');
-            sql.append(" FROM ").appendIdentifier(parent.name.catalog, parent.name.schema,
parent.name.table);
-            addOffsetLimit(sql, offset, limit);
+            sql.append(" FROM ").appendIdentifier(source.parent.name.catalog, source.parent.name.schema,
source.parent.name.table);
+
+            if (!count && sort != null && sort.length > 0) {
+                sql.append(" ORDER BY ");
+                append(sql, sort[0]);
+                for (int i = 1 ; i < sort.length ; i++)
+                    append(sql.append(", "), sort[i]);
+            }
+
+            addOffsetLimit(sql, source.offset, source.limit);
 
             return sql.toString();
         }
+    }
 
-        Features build(final Connection conn) throws SQLException, DataStoreException {
-            return new Features(parent, conn, parent.attributes, new ArrayList<>(), null, distinct, offset, limit);
-        }
+    private static void append(SQLBuilder target, SortBy toAppend) {
+        target.appendIdentifier(toAppend.getPropertyName().getPropertyName()).append(" ");
+        if (toAppend.getSortOrder() != null) target.append(toAppend.getSortOrder().toSQL());
     }
 }
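
Taken together, the new Builder/Connector split means callers first accumulate sorting on the Builder, then freeze it into an immutable Connector through select(...) or distinct(...), and only then open a connection. Below is a hedged usage sketch, assuming a hypothetical helper living in the same org.apache.sis.internal.sql.feature package (Table, Features and ColumnRef are package-private there) and assuming Table.source stays accessible within the package; note that in this WIP state the Connector copies Builder.sort unconditionally, so a non-empty sort order is assumed:

package org.apache.sis.internal.sql.feature;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import org.opengis.feature.Feature;
import org.opengis.filter.sort.SortBy;

import org.apache.sis.storage.DataStoreException;
import org.apache.sis.util.collection.BackingStoreException;

final class FeaturesUsageSketch {
    private FeaturesUsageSketch() {}

    /** Builds a distinct, sorted feature stream over the given table through the Builder/Connector pair. */
    static Stream<Feature> distinctSorted(final Table table, final SortBy[] order, final ColumnRef... columns)
            throws SQLException, DataStoreException
    {
        final Features.Connector connector = new Features.Builder(table)
                .sortBy(order)          // defensively copied; must be non-empty in this WIP state
                .distinct(columns);     // freezes the builder into an immutable Connector
        final Connection connection = QueryFeatureSet.connectReadOnly(table.source);
        final Features iterator = connector.connect(connection);   // runs the SELECT DISTINCT ... ORDER BY ...
        return StreamSupport.stream(iterator, false).onClose(() -> {
            try {
                connection.close();
            } catch (SQLException e) {
                throw new BackingStoreException(e);
            }
        });
    }
}
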
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/QueryFeatureSet.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/QueryFeatureSet.java
new file mode 100644
index 0000000..7c1679a
--- /dev/null
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/QueryFeatureSet.java
@@ -0,0 +1,82 @@
+package org.apache.sis.internal.sql.feature;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.stream.Stream;
+import javax.sql.DataSource;
+
+import org.opengis.feature.Feature;
+import org.opengis.feature.FeatureType;
+
+import org.apache.sis.internal.metadata.sql.SQLBuilder;
+import org.apache.sis.internal.storage.AbstractFeatureSet;
+import org.apache.sis.storage.DataStoreException;
+
+public class QueryFeatureSet extends AbstractFeatureSet {
+
+    /**
+     * Keep builder to allow native limit and offset through stream operation.
+     */
+    private final SQLBuilder queryBuilder;
+
+    private final Analyzer analyzer;
+
+    private final DataSource source;
+    private final FeatureType resultType;
+
+    public QueryFeatureSet(SQLBuilder queryBuilder, Analyzer analyzer, DataSource source) throws DataStoreException {
+        super(analyzer.listeners);
+        this.queryBuilder = queryBuilder;
+        this.analyzer = analyzer;
+        this.source = source;
+
+        try (Connection conn = connectReadOnly(source)) {
+            final PreparedStatement statement = conn.prepareStatement(queryBuilder.toString());
+            final ResultSetMetaData rmd = statement.getMetaData();
+            resultType = analyzer.buildFeatureType(rmd);
+        } catch (SQLException e) {
+            throw new DataStoreException("Cannot analyze query metadata (feature type determination)",
e);
+        }
+    }
+
+    /**
+     * Acquire a connection over parent database, forcing a few parameters to ensure optimal read performance and
+     * limiting user rights:
+     * <ul>
+     *     <li>{@link Connection#setAutoCommit(boolean) auto-commit} to false</li>
+     *     <li>{@link Connection#setReadOnly(boolean) querying read-only}</li>
+     * </ul>
+     *
+     * @param source Database pointer to create connection from.
+     * @return A new connection to database, with deactivated auto-commit.
+     * @throws SQLException If we cannot create a new connection. See {@link DataSource#getConnection()} for details.
+     */
+    public static Connection connectReadOnly(final DataSource source) throws SQLException {
+        final Connection c = source.getConnection();
+        try {
+            c.setTransactionIsolation(Connection.TRANSACTION_NONE);
+            c.setAutoCommit(false);
+            c.setReadOnly(true);
+        } catch (SQLException e) {
+            try {
+                c.close();
+            } catch (RuntimeException|SQLException bis) {
+                e.addSuppressed(bis);
+            }
+            throw e;
+        }
+        return c;
+    }
+
+    @Override
+    public FeatureType getType() {
+        return resultType;
+    }
+
+    @Override
+    public Stream<Feature> features(boolean parallel) throws DataStoreException {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 19/09/2019
+    }
+}
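
A minimal usage sketch of the connectReadOnly(DataSource) helper introduced above; the helper class, table name and data source below are placeholders:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.sql.DataSource;

import org.apache.sis.internal.sql.feature.QueryFeatureSet;

final class ReadOnlyQueryExample {
    private ReadOnlyQueryExample() {}

    /** Counts the rows of a table using a read-only, non-auto-commit connection. */
    static long countRows(final DataSource source, final String table) throws SQLException {
        // connectReadOnly(...) disables auto-commit and flags the connection read-only before returning it.
        // The table name is a trusted placeholder here; real code would validate or escape it.
        try (Connection connection = QueryFeatureSet.connectReadOnly(source);
             Statement statement = connection.createStatement();
             ResultSet result = statement.executeQuery("SELECT COUNT(*) FROM " + table))
        {
            return result.next() ? result.getLong(1) : 0;
        }
    }
}
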
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/SQLQueryAdapter.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/SQLQueryAdapter.java
new file mode 100644
index 0000000..c6b3dc4
--- /dev/null
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/SQLQueryAdapter.java
@@ -0,0 +1,56 @@
+package org.apache.sis.internal.sql.feature;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+
+import org.opengis.filter.Filter;
+import org.opengis.filter.sort.SortBy;
+
+import org.apache.sis.internal.storage.SubsetAdapter;
+import org.apache.sis.internal.storage.query.SimpleQuery;
+import org.apache.sis.storage.FeatureSet;
+
+public class SQLQueryAdapter implements SubsetAdapter.AdapterBuilder {
+
+    final Table parent;
+
+    private SimpleQuery.Column[] columns;
+    private SortBy[] sorting;
+
+    public SQLQueryAdapter(Table parent) {
+        this.parent = parent;
+    }
+
+    @Override
+    public long offset(long offset) {
+        return offset; // Done by stream overload
+    }
+
+    @Override
+    public long limit(long limit) {
+        return limit; // Done by stream overload
+    }
+
+    @Override
+    public Filter filter(Filter filter) {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 18/09/2019
+    }
+
+    @Override
+    public boolean sort(SortBy[] comparison) {
+        sorting = Arrays.copyOf(comparison, comparison.length);
+        return true;
+    }
+
+    @Override
+    public SimpleQuery.Column[] select(List<SimpleQuery.Column> columns) {
+        this.columns = columns.toArray(new SimpleQuery.Column[columns.size()]);
+        return null;
+    }
+
+    @Override
+    public Optional<FeatureSet> build() {
+        throw new UnsupportedOperationException("Not supported yet"); // "Alexis Manin (Geomatys)" on 18/09/2019
+    }
+}
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/StreamSQL.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/StreamSQL.java
index 7a295b2..1768c63 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/StreamSQL.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/StreamSQL.java
@@ -38,8 +38,6 @@ import java.util.stream.LongStream;
 import java.util.stream.Stream;
 import java.util.stream.StreamSupport;
 
-import javax.sql.DataSource;
-
 import org.opengis.feature.Feature;
 
 import org.apache.sis.internal.util.DoubleStreamDecoration;
@@ -67,6 +65,8 @@ class StreamSQL extends StreamDecoration<Feature> {
     final Features.Builder queryBuilder;
     boolean parallel;
 
+    private boolean distinct;
+
     private Consumer<? super Feature> peekAction;
 
     StreamSQL(final Table source) {
@@ -111,7 +111,7 @@ class StreamSQL extends StreamDecoration<Feature> {
 
     @Override
     public Stream<Feature> distinct() {
-        queryBuilder.distinct = true;
+        distinct = true;
         return this;
     }
 
@@ -144,12 +144,7 @@ class StreamSQL extends StreamDecoration<Feature> {
     @Override
     public long count() {
         // Avoid opening a connection if sql text cannot be evaluated.
-        final String sql;
-        try {
-            sql = queryBuilder.getSnapshot(true);
-        } catch (SQLException e) {
-            throw new BackingStoreException("Cannot create SQL COUNT query", e);
-        }
+        final String sql = queryBuilder.select(distinct).getSnapshot(true);
         try (Connection conn = queryBuilder.parent.source.getConnection()) {
             try (Statement st = conn.createStatement();
                  ResultSet rs = st.executeQuery(sql)) {
@@ -165,12 +160,12 @@ class StreamSQL extends StreamDecoration<Feature> {
     @Override
     protected synchronized Stream<Feature> createDecoratedStream() {
         final AtomicReference<Connection> connectionRef = new AtomicReference<>();
-        Stream<Feature> featureStream = Stream.of(uncheck(this::connectNoAuto))
+        Stream<Feature> featureStream = Stream.of(uncheck(() -> QueryFeatureSet.connectReadOnly(queryBuilder.parent.source)))
                 .map(Supplier::get)
                 .peek(connectionRef::set)
                 .flatMap(conn -> {
                     try {
-                        final Features iter = queryBuilder.build(conn);
+                        final Features iter = queryBuilder.select(distinct).connect(conn);
                         return StreamSupport.stream(iter, parallel);
                     } catch (SQLException | DataStoreException e) {
                         throw new BackingStoreException(e);
@@ -182,19 +177,6 @@ class StreamSQL extends StreamDecoration<Feature> {
     }
 
     /**
-     * Acquire a connection over {@link Table parent table} database, forcing
-     * {@link Connection#setAutoCommit(boolean) auto-commit} to false.
-     *
-     * @return A new connection to {@link Table parent table} database, with deactivated auto-commit.
-     * @throws SQLException If we cannot create a new connection. See {@link DataSource#getConnection()} for details.
-     */
-    private Connection connectNoAuto() throws SQLException {
-        final Connection conn = queryBuilder.parent.source.getConnection();
-        conn.setAutoCommit(false);
-        return conn;
-    }
-
-    /**
      * Transform a callable into supplier by catching any potential verified exception and rethrowing it as a {@link BackingStoreException}.
      * @param generator The callable to use in a non-verified error context. Must not be null.
      * @param <T> The return type of input callable.
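
The javadoc above describes the uncheck(...) adapter kept in StreamSQL. As a stand-alone illustration of that pattern (a sketch, not the class's actual code), such an adapter typically looks like this:

import java.util.concurrent.Callable;
import java.util.function.Supplier;

import org.apache.sis.util.collection.BackingStoreException;

/** Stand-alone illustration of the "uncheck" adapter documented above. */
final class Uncheck {
    private Uncheck() {}

    /** Wraps a callable as a supplier, rethrowing checked failures as unchecked BackingStoreException. */
    static <T> Supplier<T> uncheck(final Callable<T> generator) {
        return () -> {
            try {
                return generator.call();
            } catch (RuntimeException e) {
                throw e;                               // already unchecked: propagate as-is
            } catch (Exception e) {
                throw new BackingStoreException(e);    // checked failure surfaces lazily when the supplier is used
            }
        };
    }
}
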
diff --git a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java
index 5a9c555..3bffd90 100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java
@@ -48,10 +48,14 @@ import org.apache.sis.internal.metadata.sql.Reflection;
 import org.apache.sis.internal.metadata.sql.SQLBuilder;
 import org.apache.sis.internal.metadata.sql.SQLUtilities;
 import org.apache.sis.internal.storage.AbstractFeatureSet;
+import org.apache.sis.internal.storage.query.SimpleQuery;
 import org.apache.sis.internal.util.CollectionsExt;
 import org.apache.sis.storage.DataStoreContentException;
 import org.apache.sis.storage.DataStoreException;
+import org.apache.sis.storage.FeatureSet;
 import org.apache.sis.storage.InternalDataStoreException;
+import org.apache.sis.storage.Query;
+import org.apache.sis.storage.UnsupportedQueryException;
 import org.apache.sis.util.CharSequences;
 import org.apache.sis.util.Classes;
 import org.apache.sis.util.Debug;
@@ -145,6 +149,12 @@ final class Table extends AbstractFeatureSet {
     final DatabaseMetaData dbMeta;
 
     /**
+     * An SQL builder whose sole purpose is to allow creation of new builders without metadata analysis.
+     * It reduces the risk of errors and reuses already computed information.
+     */
+    private final SQLBuilder sqlTemplate;
+
+    /**
      * Creates a description of the table of the given name.
      * The table is identified by {@code id}, which contains a (catalog, schema, name) tuple.
      * The catalog and schema parts are optional and can be null, but the table is mandatory.
@@ -159,6 +169,7 @@ final class Table extends AbstractFeatureSet {
     {
         super(analyzer.listeners);
         this.dbMeta = analyzer.metadata;
+        this.sqlTemplate = new SQLBuilder(this.dbMeta, true);
         this.source = analyzer.source;
         this.name   = id;
         final String tableEsc  = analyzer.escape(id.table);
@@ -418,6 +429,26 @@ final class Table extends AbstractFeatureSet {
         this.attributes = Collections.unmodifiableList(attributes);
     }
 
+    @Override
+    public FeatureSet subset(Query query) throws UnsupportedQueryException, DataStoreException {
+        if (!(query instanceof SimpleQuery)) return super.subset(query);
+        boolean remainingQuery = true;
+        final SimpleQuery q = (SimpleQuery) query;
+        FeatureSet subset = this;
+        final List<SimpleQuery.Column> cols = q.getColumns();
+
+        /**
+         * Once filter has been taken care of, we will be able to check columns to filter. Note that all filters
+         * managed by the database engine can use non-returned columns, but that is not the case for remaining ones,
+         * which are applied after feature creation, therefore with only the filtered columns accessible.
+         */
+        if (cols != null && !cols.isEmpty()) {
+
+        }
+
+        return remainingQuery? subset.subset(q) : subset;
+    }
+
     /**
      * Returns the given relations as an array, or {@code null} if none.
      */
@@ -594,6 +625,10 @@ final class Table extends AbstractFeatureSet {
         return count;
     }
 
+    public SQLBuilder createStatement() {
+        return new SQLBuilder(sqlTemplate);
+    }
+
     /**
      * Returns a stream of all features contained in this dataset.
      *
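
The subset(Query) override added above is still a skeleton (the column-handling block is empty). One assumption about where it is heading, given the SubsetAdapter and SQLQueryAdapter classes introduced in this same commit, is a delegation along these lines (hypothetical helper class, sketch only):

package org.apache.sis.internal.sql.feature;

import org.apache.sis.internal.storage.SubsetAdapter;
import org.apache.sis.internal.storage.query.SimpleQuery;
import org.apache.sis.storage.DataStoreException;
import org.apache.sis.storage.FeatureSet;
import org.apache.sis.storage.Query;
import org.apache.sis.storage.UnsupportedQueryException;

final class TableSubsetSketch {
    private TableSubsetSketch() {}

    /** What Table.subset(Query) could delegate to once SQLQueryAdapter is completed. */
    static FeatureSet subset(final Table table, final Query query)
            throws UnsupportedQueryException, DataStoreException
    {
        if (!(query instanceof SimpleQuery)) {
            return table.subset(query);    // fall back on the generic AbstractFeatureSet behaviour
        }
        // SubsetAdapter splits the query: what SQLQueryAdapter accepts goes to the database,
        // the remainder is re-expressed as a SimpleQuery applied on the returned FeatureSet.
        final SubsetAdapter adapter = new SubsetAdapter(fs -> new SQLQueryAdapter(table));
        return adapter.subset(table, (SimpleQuery) query);
    }
}
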
diff --git a/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/SubsetAdapter.java b/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/SubsetAdapter.java
new file mode 100644
index 0000000..08e4310
--- /dev/null
+++ b/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/SubsetAdapter.java
@@ -0,0 +1,131 @@
+package org.apache.sis.internal.storage;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.function.Function;
+
+import org.opengis.filter.Filter;
+import org.opengis.filter.sort.SortBy;
+
+import org.apache.sis.internal.storage.query.SimpleQuery;
+import org.apache.sis.storage.FeatureSet;
+
+import static org.apache.sis.internal.storage.query.SimpleQuery.UNLIMITED;
+
+public final class SubsetAdapter {
+
+    final Function<FeatureSet, AdapterBuilder> driverSupplier;
+
+    public SubsetAdapter(Function<FeatureSet, AdapterBuilder> driverSupplier) {
+        this.driverSupplier = driverSupplier;
+    }
+
+    public final FeatureSet subset(final FeatureSet source, SimpleQuery query) {
+        final AdapterBuilder driver = driverSupplier.apply(source);
+
+        final SimpleQuery remaining = new SimpleQuery();
+
+        final long offset = query.getOffset();
+        if (offset > 0) remaining.setOffset(driver.offset(offset));
+
+        final long limit = query.getLimit();
+        if (limit != UNLIMITED) remaining.setLimit(driver.limit(limit));
+
+        if (filteringRequired(query)) remaining.setFilter(driver.filter(query.getFilter()));
+
+        if (sortRequired(query) && !driver.sort(query.getSortBy())) remaining.setSortBy(query.getSortBy());
+
+        if (!allColumnsIncluded(query)) {
+            final SimpleQuery.Column[] remainingCols = driver.select(query.getColumns());
+            if (remainingCols != null && remainingCols.length > 0)
+                remaining.setColumns(remainingCols);
+        }
+
+        final FeatureSet driverSubset = driver.build().orElse(source);
+
+        return isNoOp(remaining)? driverSubset : remaining.execute(driverSubset);
+    }
+
+    protected final static boolean isNoOp(final SimpleQuery in) {
+        return in.getOffset() <= 0
+                && in.getLimit() == UNLIMITED
+                && allColumnsIncluded(in)
+                && !filteringRequired(in)
+                && !sortRequired(in);
+    }
+
+    protected final static boolean sortRequired(final SimpleQuery in) {
+        final SortBy[] sortBy = in.getSortBy();
+        return sortBy != null && sortBy.length > 0 && Arrays.stream(sortBy).anyMatch(Objects::nonNull);
+    }
+
+    protected final static boolean allColumnsIncluded(final SimpleQuery in) {
+        final List<SimpleQuery.Column> cols = in.getColumns();
+        return cols == null || cols.isEmpty();
+    }
+
+    protected final static boolean filteringRequired(SimpleQuery in) {
+        final Filter filter = in.getFilter();
+        return filter != Filter.INCLUDE;
+    }
+
+    public interface AdapterBuilder {
+
+        /**
+         * Specify an offset to use in custom query.
+         *
+         * @param offset The offset to handle.
+         * @return 0 if this builder can completely handle the given offset. The input value if the underlying
+         * driver cannot manage the offset itself. Note that you can return another value in case the driver and
+         * the default query system must be stacked. Imagine the case of a partitioned storage: maybe the driver
+         * can handle fixed offsets, and let the default implementation manage the remaining part of the offset
+         * downstream. For example, in a storage where features are chunked 10 by 10, when querying an offset of 12,
+         * the inner driver can configure the second partition to be loaded (elements 10 to 20), and let the default
+         * query skip 2 elements after that.
+         *
+         * @throws IllegalArgumentException If given value is illegal for the driver.
+         */
+        long offset(long offset);
+
+        /**
+         * Set a maximum number of elements to retrieve from custom query.
+         *
+         * @param limit The count of features to handle.
+         * @return {@link SimpleQuery#UNLIMITED} if this builder can completely handle the given limit. The input
+         * value if the underlying driver cannot do it itself, or must be stacked with the default query system.
+         * Imagine the case of a partitioned storage: maybe the driver can load entire chunks of data, and let the
+         * default implementation cut the last returned chunk. For example, in a storage where features are chunked
+         * 10 by 10, when querying a limit of 12, the inner driver can return two complete partitions (20 elements),
+         * and let the default query stop processing after 12 elements have gone through.
+         *
+         * @throws IllegalArgumentException If given value is illegal for the driver.
+         */
+        long limit(long limit);
+
+        Filter filter(final Filter filter);
+
+        /**
+         * Submit a sort subquery to the driver.
+         *
+         * @param comparison The columns to sort, as specified in {@link SimpleQuery#getSortBy()}.
+         * @return True if the driver handles the comparison. If false, it means that the driver won't perform any
+         * sort, and the default implementation (i.e. {@link SimpleQuery}) must handle it.
+         */
+        boolean sort(final SortBy[] comparison);
+
+        /**
+         * Specify a subset of columns to return to the driver.
+         * @param columns The columns requested by the query.
+         * @return The columns that the driver cannot handle itself and that the default implementation must manage,
+         *         or {@code null} (or an empty array) if the driver handles all of them.
+         */
+        SimpleQuery.Column[] select(List<SimpleQuery.Column> columns);
+
+        /**
+         * Take a snapshot of all parameters given to query adaptation.
+         *
+         * @return A custom driver query. If custom query is a no-op, returns an empty shell.
+         */
+        Optional<FeatureSet> build();
+    }
+}
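
To make the offset/limit contract above concrete, here is a self-contained sketch of an AdapterBuilder for the hypothetical storage partitioned in chunks of 10 features that the javadoc uses as its example: the driver handles whole chunks natively and hands the remainder back to the default SimpleQuery machinery.

import java.util.List;
import java.util.Optional;

import org.opengis.filter.Filter;
import org.opengis.filter.sort.SortBy;

import org.apache.sis.internal.storage.SubsetAdapter;
import org.apache.sis.internal.storage.query.SimpleQuery;
import org.apache.sis.storage.FeatureSet;

/** Hypothetical driver for a storage partitioned in chunks of 10 features, as in the javadoc example. */
final class ChunkedAdapterBuilder implements SubsetAdapter.AdapterBuilder {
    private static final int CHUNK_SIZE = 10;

    long nativeOffset;                            // offset handled by the driver, a multiple of CHUNK_SIZE
    long nativeLimit = SimpleQuery.UNLIMITED;     // limit handled by the driver, rounded up to whole chunks

    @Override
    public long offset(long offset) {
        nativeOffset = (offset / CHUNK_SIZE) * CHUNK_SIZE;   // offset 12: the driver skips the first chunk (10)...
        return offset - nativeOffset;                        // ...and asks the default query to skip 2 more.
    }

    @Override
    public long limit(long limit) {
        if (limit == SimpleQuery.UNLIMITED) return limit;
        nativeLimit = ((limit + CHUNK_SIZE - 1) / CHUNK_SIZE) * CHUNK_SIZE;  // limit 12: load two chunks (20)...
        return limit;                                                        // ...the default query stops after 12.
    }

    @Override
    public Filter filter(Filter filter) {
        return filter;                // no native filtering: give the whole filter back to the caller
    }

    @Override
    public boolean sort(SortBy[] comparison) {
        return false;                 // sorting left entirely to the default implementation
    }

    @Override
    public SimpleQuery.Column[] select(List<SimpleQuery.Column> columns) {
        return columns.toArray(new SimpleQuery.Column[columns.size()]);      // column selection not handled natively
    }

    @Override
    public Optional<FeatureSet> build() {
        // A real driver would return a FeatureSet restricted to the chunks selected by nativeOffset/nativeLimit.
        // There is no storage backend in this sketch, so signal that the native part is unavailable.
        throw new UnsupportedOperationException("sketch only: no storage backend");
    }
}
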
diff --git a/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/query/SimpleQuery.java b/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/query/SimpleQuery.java
index 17d6912..271e62c 100644
--- a/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/query/SimpleQuery.java
+++ b/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/query/SimpleQuery.java
@@ -20,6 +20,16 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
 import java.util.StringJoiner;
+
+import org.opengis.feature.AttributeType;
+import org.opengis.feature.FeatureAssociationRole;
+import org.opengis.feature.FeatureType;
+import org.opengis.feature.PropertyType;
+import org.opengis.filter.Filter;
+import org.opengis.filter.expression.Expression;
+import org.opengis.filter.sort.SortBy;
+import org.opengis.util.GenericName;
+
 import org.apache.sis.feature.builder.FeatureTypeBuilder;
 import org.apache.sis.internal.feature.FeatureExpression;
 import org.apache.sis.internal.util.UnmodifiableArrayList;
@@ -29,14 +39,6 @@ import org.apache.sis.util.ArgumentChecks;
 import org.apache.sis.util.Classes;
 import org.apache.sis.util.iso.Names;
 import org.apache.sis.util.resources.Errors;
-import org.opengis.feature.AttributeType;
-import org.opengis.feature.FeatureAssociationRole;
-import org.opengis.feature.FeatureType;
-import org.opengis.feature.PropertyType;
-import org.opengis.filter.Filter;
-import org.opengis.filter.expression.Expression;
-import org.opengis.filter.sort.SortBy;
-import org.opengis.util.GenericName;
 
 
 /**
@@ -54,7 +56,7 @@ public class SimpleQuery extends Query {
      * Sentinel limit value for queries of unlimited length.
      * This value can be given to {@link #setLimit(long)} or retrieved from {@link #getLimit()}.
      */
-    private static final long UNLIMITED = -1;
+    public static final long UNLIMITED = -1;
 
     /**
      * The columns to retrieve, or {@code null} if all columns shall be included in the query.

