sis-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From desruisse...@apache.org
Subject [sis] 01/02: Merge branch 'geoapi-3.1' into master. The main work is the completion of sis-sqlstore module.
Date Thu, 19 Jul 2018 07:31:06 GMT
This is an automated email from the ASF dual-hosted git repository.

desruisseaux pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/sis.git

commit 7ad54a9bb9ae1205d905120beedde4ef5c072745
Merge: 969b7d0 e044c13
Author: Martin Desruisseaux <martin.desruisseaux@geomatys.com>
AuthorDate: Thu Jul 19 09:23:09 2018 +0200

    Merge branch 'geoapi-3.1' into master. The main work is the completion of sis-sqlstore module.

 NOTICE                                             |   2 +-
 core/sis-build-helper/pom.xml                      |   8 +-
 .../java/org/apache/sis/feature/FeatureFormat.java |   9 +-
 .../apache/sis/feature/StringJoinOperation.java    |   3 +-
 .../feature/builder/AssociationRoleBuilder.java    |   2 +-
 .../sis/feature/builder/AttributeTypeBuilder.java  |   4 +-
 .../sis/feature/builder/FeatureTypeBuilder.java    |  20 +-
 .../sis/feature/builder/PropertyTypeBuilder.java   |   2 +-
 .../apache/sis/feature/builder/TypeBuilder.java    |   9 +-
 core/sis-metadata/pom.xml                          |  10 +
 .../apache/sis/internal/metadata/sql/Dialect.java  |   3 +-
 .../sis/internal/metadata/sql/Reflection.java      | 216 +++++++
 .../sis/internal/metadata/sql/SQLBuilder.java      |  35 +-
 .../sis/internal/metadata/sql/SQLUtilities.java    |   5 +-
 .../sis/internal/metadata/sql/ScriptRunner.java    |   4 +-
 .../iso/maintenance/AttributeTypeAdapter.java      |   4 +-
 .../iso/maintenance/DefaultScopeDescription.java   |   2 +-
 .../iso/maintenance/FeatureTypeAdapter.java        |   4 +-
 .../{LegacyType.java => LegacyFeatureType.java}    |  17 +-
 .../apache/sis/metadata/sql/MetadataSource.java    |   3 +-
 .../apache/sis/metadata/sql/MetadataWriter.java    |  11 +-
 .../apache/sis/metadata/sql/TableHierarchy.java    |   6 +-
 .../org/apache/sis/util/iso/GlobalNameSpace.java   |   2 +-
 .../main/java/org/apache/sis/util/iso/Names.java   |  25 +-
 .../main/java/org/apache/sis/xml/Namespaces.java   |  14 +-
 .../internal/jaxb/cat/CodeListMarshallingTest.java |   4 +-
 .../sis/internal/jaxb/cat/EnumMarshallingTest.java |   4 +-
 .../sis/internal/jaxb/gco/PropertyTypeTest.java    |   8 +-
 .../sis/internal/jaxb/gml/TimePeriodTest.java      |   4 +-
 .../internal/jaxb/lan/FreeTextMarshallingTest.java |   4 +-
 .../sis/internal/jaxb/lan/LanguageCodeTest.java    |   4 +-
 .../metadata/replace/ServiceParameterTest.java     |   4 +-
 .../internal/metadata/sql/ScriptRunnerTest.java    |  16 +-
 .../sis/internal/metadata/sql/TestDatabase.java    | 119 ----
 ...aratorTest.java => DocumentComparatorTest.java} |  20 +-
 .../apache/sis/internal/xml/XmlUtilitiesTest.java  |   4 +-
 .../sis/metadata/iso/CustomMetadataTest.java       |   4 +-
 .../sis/metadata/iso/DefaultIdentifierTest.java    |   4 +-
 .../sis/metadata/iso/DefaultMetadataTest.java      |   4 +-
 .../sis/metadata/iso/ImmutableIdentifierTest.java  |   4 +-
 .../metadata/iso/citation/DefaultContactTest.java  |   4 +-
 .../iso/citation/DefaultResponsibilityTest.java    |   4 +-
 .../constraint/DefaultLegalConstraintsTest.java    |   4 +-
 .../identification/DefaultBrowseGraphicTest.java   |   4 +-
 .../DefaultRepresentativeFractionTest.java         |   4 +-
 .../iso/identification/DefaultResolutionTest.java  |   4 +-
 .../metadata/iso/lineage/DefaultLineageTest.java   |   4 +-
 .../sis/metadata/sql/IdentifierGeneratorTest.java  |  13 +-
 .../sis/metadata/sql/MetadataSourceTest.java       |  12 +-
 .../sis/metadata/sql/MetadataWriterTest.java       |  49 +-
 .../org/apache/sis/metadata/xml/TestUsingFile.java |   4 +-
 .../java/org/apache/sis/test/MetadataAssert.java   |  15 +-
 .../org/apache/sis/test/mock/package-info.java     |   2 +-
 .../java/org/apache/sis/test/sql/TestDatabase.java | 269 +++++++++
 .../org/apache/sis/test}/sql/package-info.java     |  11 +-
 .../apache/sis/test/suite/MetadataTestSuite.java   |   2 +-
 .../DocumentComparator.java}                       |  30 +-
 .../test/{XMLTestCase.java => xml/TestCase.java}   |  12 +-
 .../org/apache/sis/test/xml}/package-info.java     |  14 +-
 .../apache/sis/util/iso/NameMarshallingTest.java   |   4 +-
 .../sis/xml/CharSequenceSubstitutionTest.java      |   4 +-
 .../apache/sis/xml/NilReasonMarshallingTest.java   |   4 +-
 .../org/apache/sis/xml/ReferenceResolverMock.java  |   2 +-
 .../org/apache/sis/xml/UUIDMarshallingTest.java    |   4 +-
 .../org/apache/sis/xml/XLinkMarshallingTest.java   |   4 +-
 .../java/org/apache/sis/parameter/Parameters.java  |   2 +
 .../referencing/factory/sql/EPSGDataAccess.java    |   2 +-
 .../sis/referencing/factory/sql/SQLTranslator.java |   9 +-
 .../CC_GeneralOperationParameterTest.java          |   4 +-
 .../CC_OperationParameterGroupTest.java            |   4 +-
 .../referencing/SecondDefiningParameterTest.java   |   4 +-
 .../sis/parameter/ParameterMarshallingTest.java    |   4 +-
 .../referencing/crs/DefaultCompoundCRSTest.java    |   4 +-
 .../sis/referencing/crs/DefaultDerivedCRSTest.java |   4 +-
 .../referencing/crs/DefaultEngineeringCRSTest.java |   4 +-
 .../referencing/crs/DefaultGeodeticCRSTest.java    |   4 +-
 .../sis/referencing/crs/DefaultImageCRSTest.java   |   4 +-
 .../referencing/crs/DefaultProjectedCRSTest.java   |   4 +-
 .../sis/referencing/cs/DefaultCartesianCSTest.java |   4 +-
 .../referencing/cs/DefaultEllipsoidalCSTest.java   |   4 +-
 .../referencing/datum/DefaultEllipsoidTest.java    |   4 +-
 .../datum/DefaultGeodeticDatumTest.java            |   4 +-
 .../datum/DefaultPrimeMeridianTest.java            |   4 +-
 .../datum/DefaultTemporalDatumTest.java            |   4 +-
 .../datum/DefaultVerticalDatumTest.java            |   4 +-
 .../referencing/factory/sql/EPSGInstallerTest.java |  43 +-
 .../factory/sql/epsg/DataScriptFormatter.java      |  10 +-
 .../DefaultConcatenatedOperationTest.java          |   4 +-
 .../operation/DefaultPassThroughOperationTest.java |   4 +-
 .../operation/SingleOperationMarshallingTest.java  |   4 +-
 .../apache/sis/test/integration/MetadataTest.java  |   8 +-
 .../apache/sis/internal/util/CollectionsExt.java   |  22 +
 .../sis/util/resources/IndexedResourceBundle.java  |   2 +-
 .../resources/ResourceInternationalString.java     |  27 +-
 .../org/apache/sis/util/resources/Vocabulary.java  |   5 +
 .../sis/util/resources/Vocabulary.properties       |   1 +
 .../sis/util/resources/Vocabulary_fr.properties    |   1 +
 .../apache/sis/util/resources/package-info.java    |   2 +-
 ide-project/NetBeans/build.xml                     |   3 +
 ide-project/NetBeans/nbproject/project.properties  |   5 +-
 pom.xml                                            |  12 +-
 .../profile/fra/DataIdentificationTest.java        |   4 +-
 .../profile/fra/DirectReferenceSystemTest.java     |   4 +-
 storage/pom.xml                                    |   4 +-
 .../storage/earthobservation/LandsatReader.java    |  23 +-
 .../earthobservation/LandsatReaderTest.java        |   6 +
 .../apache/sis/storage/geotiff/Compression.java    |  28 +-
 .../apache/sis/storage/geotiff/GeoTiffStore.java   |   1 +
 .../apache/sis/storage/netcdf/AttributeNames.java  |  19 +-
 .../apache/sis/storage/netcdf/MetadataReader.java  |  69 +--
 .../org/apache/sis/storage/netcdf/NetcdfStore.java |  48 +-
 .../sis/storage/netcdf/MetadataReaderTest.java     |   8 +-
 .../java/org/apache/sis/storage/sql/SQLQuery.java  |  72 ---
 .../java/org/apache/sis/storage/sql/SQLStore.java  |  73 ---
 storage/{sis-sql => sis-sqlstore}/pom.xml          |  25 +-
 .../apache/sis/internal/sql/feature/Analyzer.java  | 329 +++++++++++
 .../apache/sis/internal/sql/feature/Database.java  | 237 ++++++++
 .../apache/sis/internal/sql/feature/Features.java  | 499 ++++++++++++++++
 .../apache/sis/internal/sql/feature/Relation.java  | 419 ++++++++++++++
 .../apache/sis/internal/sql/feature/Resources.java | 211 +++++++
 .../sis/internal/sql/feature/Resources.properties  |  28 +
 .../internal/sql/feature/Resources_fr.properties   |  33 ++
 .../sis/internal/sql/feature/SpatialFunctions.java | 144 +++++
 .../org/apache/sis/internal/sql/feature/Table.java | 624 +++++++++++++++++++++
 .../sis/internal/sql/feature/TableReference.java   | 152 +++++
 .../sis/internal/sql/feature/package-info.java}    |  21 +-
 .../java/org/apache/sis/storage/sql/SQLStore.java  | 245 ++++++++
 .../apache/sis/storage/sql/SQLStoreProvider.java   | 231 ++++++++
 .../org/apache/sis/storage/sql/package-info.java   |   7 +-
 .../org/apache/sis/storage/sql/SQLStoreTest.java   | 270 +++++++++
 .../org/apache/sis/test/suite/SQLTestSuite.java}   |  29 +-
 .../org/apache/sis/test/suite/package-info.txt     |   0
 .../org/apache/sis/storage/sql/Features.sql        |  69 +++
 .../sis/internal/storage/AbstractFeatureSet.java   |   2 +-
 .../sis/internal/storage/MetadataBuilder.java      | 139 ++++-
 .../org/apache/sis/internal/storage/csv/Store.java |   4 +-
 .../java/org/apache/sis/storage/Aggregate.java     |   2 +-
 .../sis/storage/InternalDataStoreException.java    |  71 +++
 138 files changed, 4787 insertions(+), 707 deletions(-)

diff --cc core/sis-feature/src/main/java/org/apache/sis/feature/builder/FeatureTypeBuilder.java
index ba883a2,04fb43b..cc456e5
--- a/core/sis-feature/src/main/java/org/apache/sis/feature/builder/FeatureTypeBuilder.java
+++ b/core/sis-feature/src/main/java/org/apache/sis/feature/builder/FeatureTypeBuilder.java
@@@ -641,9 -648,11 +655,9 @@@ public class FeatureTypeBuilder extend
       * @param  name   name of the property to search.
       * @return property of the given name, or {@code null} if none.
       * @throws IllegalArgumentException if the given name is ambiguous.
 -     *
 -     * @see #addProperty(PropertyType)
       */
      public PropertyTypeBuilder getProperty(final String name) {
-         return forName(properties, name);
+         return forName(properties, name, true);
      }
  
      /**
diff --cc core/sis-feature/src/main/java/org/apache/sis/feature/builder/TypeBuilder.java
index 345db73,ae27b7d..677a4c1
--- a/core/sis-feature/src/main/java/org/apache/sis/feature/builder/TypeBuilder.java
+++ b/core/sis-feature/src/main/java/org/apache/sis/feature/builder/TypeBuilder.java
@@@ -453,8 -458,8 +454,8 @@@ public abstract class TypeBuilder imple
                  candidate = ((ScopedName) candidate).tail();
              }
          }
-         if (ambiguity != null) {
+         if (ambiguity != null && nonAmbiguous) {
 -            throw new PropertyNotFoundException(errors().getString(
 +            throw new IllegalArgumentException(errors().getString(
                      Errors.Keys.AmbiguousName_3, best.getName(), ambiguity.getName(), name));
          }
          return best;
diff --cc core/sis-metadata/src/test/java/org/apache/sis/internal/jaxb/metadata/replace/ServiceParameterTest.java
index 956dd6d,0159972..d4e0633
--- a/core/sis-metadata/src/test/java/org/apache/sis/internal/jaxb/metadata/replace/ServiceParameterTest.java
+++ b/core/sis-metadata/src/test/java/org/apache/sis/internal/jaxb/metadata/replace/ServiceParameterTest.java
@@@ -18,9 -18,10 +18,9 @@@ package org.apache.sis.internal.jaxb.me
  
  import javax.xml.bind.JAXBException;
  import org.opengis.util.MemberName;
 -import org.opengis.parameter.ParameterDirection;
  import org.apache.sis.xml.Namespaces;
  import org.apache.sis.util.iso.Names;
- import org.apache.sis.test.XMLTestCase;
+ import org.apache.sis.test.xml.TestCase;
  import org.junit.Test;
  
  import static org.apache.sis.test.MetadataAssert.*;
diff --cc core/sis-metadata/src/test/java/org/apache/sis/metadata/iso/citation/DefaultContactTest.java
index ee33ca6,ac3af11..eb37c03
--- a/core/sis-metadata/src/test/java/org/apache/sis/metadata/iso/citation/DefaultContactTest.java
+++ b/core/sis-metadata/src/test/java/org/apache/sis/metadata/iso/citation/DefaultContactTest.java
@@@ -20,11 -20,11 +20,11 @@@ import java.util.Arrays
  import java.util.Collection;
  import java.util.logging.LogRecord;
  import org.opengis.metadata.citation.Telephone;
 -import org.opengis.metadata.citation.TelephoneType;
  import org.apache.sis.internal.jaxb.Context;
 +import org.apache.sis.internal.geoapi.evolution.UnsupportedCodeList;
  import org.apache.sis.util.logging.WarningListener;
  import org.apache.sis.test.DependsOnMethod;
- import org.apache.sis.test.XMLTestCase;
+ import org.apache.sis.test.xml.TestCase;
  import org.junit.Test;
  
  import static org.junit.Assert.*;
diff --cc core/sis-metadata/src/test/java/org/apache/sis/metadata/sql/MetadataSourceTest.java
index 0b4e61a,e8697b6..bf97fff
--- a/core/sis-metadata/src/test/java/org/apache/sis/metadata/sql/MetadataSourceTest.java
+++ b/core/sis-metadata/src/test/java/org/apache/sis/metadata/sql/MetadataSourceTest.java
@@@ -17,10 -17,10 +17,9 @@@
  package org.apache.sis.metadata.sql;
  
  import java.util.Collections;
- import javax.sql.DataSource;
 -import org.opengis.metadata.citation.Citation;
  import org.opengis.metadata.distribution.Format;
  import org.apache.sis.metadata.MetadataStandard;
- import org.apache.sis.internal.metadata.sql.TestDatabase;
+ import org.apache.sis.test.sql.TestDatabase;
  import org.apache.sis.util.iso.SimpleInternationalString;
  import org.apache.sis.metadata.iso.citation.DefaultCitation;
  import org.apache.sis.metadata.iso.distribution.DefaultFormat;
diff --cc core/sis-metadata/src/test/java/org/apache/sis/metadata/sql/MetadataWriterTest.java
index e72d532,8b4d6a2..ef6b135
--- a/core/sis-metadata/src/test/java/org/apache/sis/metadata/sql/MetadataWriterTest.java
+++ b/core/sis-metadata/src/test/java/org/apache/sis/metadata/sql/MetadataWriterTest.java
@@@ -17,8 -17,7 +17,6 @@@
  package org.apache.sis.metadata.sql;
  
  import java.util.Collections;
- import javax.sql.DataSource;
- import org.postgresql.ds.PGSimpleDataSource;
 -import org.opengis.metadata.citation.Contact;
  import org.opengis.metadata.citation.Citation;
  import org.opengis.metadata.citation.PresentationForm;
  import org.opengis.metadata.citation.OnLineFunction;
diff --cc storage/sis-earth-observation/src/main/java/org/apache/sis/storage/earthobservation/LandsatReader.java
index e48767b,45eaea3..35a0a44
--- a/storage/sis-earth-observation/src/main/java/org/apache/sis/storage/earthobservation/LandsatReader.java
+++ b/storage/sis-earth-observation/src/main/java/org/apache/sis/storage/earthobservation/LandsatReader.java
@@@ -887,7 -895,8 +895,8 @@@ final class LandsatReader 
       */
      final Metadata getMetadata() throws FactoryException {
          metadata.addLanguage(Locale.ENGLISH, MetadataBuilder.Scope.METADATA);
 -        metadata.addResourceScope(ScopeCode.COVERAGE, null);
 +        metadata.addResourceScope(ScopeCode.valueOf("COVERAGE"), null);
+         metadata.addTopicCategory(TopicCategory.GEOSCIENTIFIC_INFORMATION);
          try {
              flushSceneTime();
          } catch (DateTimeException e) {
diff --cc storage/sis-earth-observation/src/test/java/org/apache/sis/storage/earthobservation/LandsatReaderTest.java
index 703e81a,aa887c0..a7e5d9b
--- a/storage/sis-earth-observation/src/test/java/org/apache/sis/storage/earthobservation/LandsatReaderTest.java
+++ b/storage/sis-earth-observation/src/test/java/org/apache/sis/storage/earthobservation/LandsatReaderTest.java
@@@ -91,197 -101,174 +91,203 @@@ public class LandsatReaderTest extends 
              reader.read(in);
              actual = reader.getMetadata();
          }
 -        final ContentVerifier verifier = new ContentVerifier();
 -        verifier.addPropertyToIgnore(Metadata.class, "metadataStandard");           // Because hard-coded in SIS.
 -        verifier.addPropertyToIgnore(Metadata.class, "referenceSystemInfo");        // Very verbose and depends on EPSG connection.
 -        verifier.addMetadataToVerify(actual);
 -        verifier.assertMetadataEquals(
 -            "language[0]",                                                                           "en",
 -            "metadataIdentifier.code",                                                               "LandsatTest",
 -            "metadataScope[0].resourceScope",                                                        ScopeCode.COVERAGE,
 -            "dateInfo[0].date",                                                                      date("2016-06-27 16:48:12"),
 -            "dateInfo[0].dateType",                                                                  DateType.CREATION,
 -            "identificationInfo[0].topicCategory[0]",                                                TopicCategory.GEOSCIENTIFIC_INFORMATION,
 -            "identificationInfo[0].citation.date[0].date",                                           date("2016-06-27 16:48:12"),
 -            "identificationInfo[0].citation.date[0].dateType",                                       DateType.CREATION,
 -            "identificationInfo[0].citation.title",                                                  "LandsatTest",
 -            "identificationInfo[0].credit[0]",                                                       "Derived from U.S. Geological Survey data",
 -            "identificationInfo[0].resourceFormat[0].formatSpecificationCitation.title",             "GeoTIFF Coverage Encoding Profile",
 -            "identificationInfo[0].resourceFormat[0].formatSpecificationCitation.alternateTitle[0]", "GeoTIFF",
 -            "identificationInfo[0].extent[0].geographicElement[0].extentTypeCode",                   true,
 -            "identificationInfo[0].extent[0].geographicElement[0].westBoundLongitude",               108.34,
 -            "identificationInfo[0].extent[0].geographicElement[0].eastBoundLongitude",               110.44,
 -            "identificationInfo[0].extent[0].geographicElement[0].southBoundLatitude",                10.50,
 -            "identificationInfo[0].extent[0].geographicElement[0].northBoundLatitude",                12.62,
 -            "identificationInfo[0].spatialResolution[0].distance",                                    15.0,
 -            "identificationInfo[0].spatialResolution[1].distance",                                    30.0,
 -
 -            "acquisitionInformation[0].platform[0].identifier.code",               "Pseudo LANDSAT",
 -            "acquisitionInformation[0].platform[0].instrument[0].identifier.code", "Pseudo TIRS",
 -            "acquisitionInformation[0].acquisitionRequirement[0].identifier.code", "Software unit tests",
 -            "acquisitionInformation[0].operation[0].significantEvent[0].context",  Context.ACQUISITION,
 -            "acquisitionInformation[0].operation[0].significantEvent[0].time",     date("2016-06-26 03:02:01.090"),
 -            "acquisitionInformation[0].operation[0].status",                       Progress.COMPLETED,
 -            "acquisitionInformation[0].operation[0].type",                         OperationType.REAL,
 -
 -            "contentInfo[0].processingLevelCode.authority.title",          "Landsat",
 -            "contentInfo[0].processingLevelCode.codeSpace",                "Landsat",
 -            "contentInfo[0].processingLevelCode.code",                     "Pseudo LT1",
 -
 -            "contentInfo[0].attributeGroup[0].attribute[0].name[0].code",  "TestImage_B1.TIF",
 -            "contentInfo[0].attributeGroup[0].attribute[1].name[0].code",  "TestImage_B2.TIF",
 -            "contentInfo[0].attributeGroup[0].attribute[2].name[0].code",  "TestImage_B3.TIF",
 -            "contentInfo[0].attributeGroup[0].attribute[3].name[0].code",  "TestImage_B4.TIF",
 -            "contentInfo[0].attributeGroup[0].attribute[4].name[0].code",  "TestImage_B5.TIF",
 -            "contentInfo[0].attributeGroup[0].attribute[5].name[0].code",  "TestImage_B6.TIF",
 -            "contentInfo[0].attributeGroup[0].attribute[6].name[0].code",  "TestImage_B7.TIF",
 -            "contentInfo[0].attributeGroup[0].attribute[7].name[0].code",  "TestImage_B9.TIF",
 -            "contentInfo[0].attributeGroup[1].attribute[0].name[0].code",  "TestImage_B8.TIF",
 -            "contentInfo[0].attributeGroup[2].attribute[0].name[0].code",  "TestImage_B10.TIF",
 -            "contentInfo[0].attributeGroup[2].attribute[1].name[0].code",  "TestImage_B11.TIF",
 -
 -            "contentInfo[0].attributeGroup[0].attribute[0].description",   "Coastal Aerosol",
 -            "contentInfo[0].attributeGroup[0].attribute[1].description",   "Blue",
 -            "contentInfo[0].attributeGroup[0].attribute[2].description",   "Green",
 -            "contentInfo[0].attributeGroup[0].attribute[3].description",   "Red",
 -            "contentInfo[0].attributeGroup[0].attribute[4].description",   "Near-Infrared",
 -            "contentInfo[0].attributeGroup[0].attribute[5].description",   "Short Wavelength Infrared (SWIR) 1",
 -            "contentInfo[0].attributeGroup[0].attribute[6].description",   "Short Wavelength Infrared (SWIR) 2",
 -            "contentInfo[0].attributeGroup[0].attribute[7].description",   "Cirrus",
 -            "contentInfo[0].attributeGroup[1].attribute[0].description",   "Panchromatic",
 -            "contentInfo[0].attributeGroup[2].attribute[0].description",   "Thermal Infrared Sensor (TIRS) 1",
 -            "contentInfo[0].attributeGroup[2].attribute[1].description",   "Thermal Infrared Sensor (TIRS) 2",
 -
 -            "contentInfo[0].attributeGroup[0].attribute[0].minValue",      1.0,
 -            "contentInfo[0].attributeGroup[0].attribute[1].minValue",      1.0,
 -            "contentInfo[0].attributeGroup[0].attribute[2].minValue",      1.0,
 -            "contentInfo[0].attributeGroup[0].attribute[3].minValue",      1.0,
 -            "contentInfo[0].attributeGroup[0].attribute[4].minValue",      1.0,
 -            "contentInfo[0].attributeGroup[0].attribute[5].minValue",      1.0,
 -            "contentInfo[0].attributeGroup[0].attribute[6].minValue",      1.0,
 -            "contentInfo[0].attributeGroup[0].attribute[7].minValue",      1.0,
 -            "contentInfo[0].attributeGroup[1].attribute[0].minValue",      1.0,
 -            "contentInfo[0].attributeGroup[2].attribute[0].minValue",      1.0,
 -            "contentInfo[0].attributeGroup[2].attribute[1].minValue",      1.0,
 -
 -            "contentInfo[0].attributeGroup[0].attribute[0].maxValue",      65535.0,
 -            "contentInfo[0].attributeGroup[0].attribute[1].maxValue",      65535.0,
 -            "contentInfo[0].attributeGroup[0].attribute[2].maxValue",      65535.0,
 -            "contentInfo[0].attributeGroup[0].attribute[3].maxValue",      65535.0,
 -            "contentInfo[0].attributeGroup[0].attribute[4].maxValue",      65535.0,
 -            "contentInfo[0].attributeGroup[0].attribute[5].maxValue",      65535.0,
 -            "contentInfo[0].attributeGroup[0].attribute[6].maxValue",      65535.0,
 -            "contentInfo[0].attributeGroup[0].attribute[7].maxValue",      65535.0,
 -            "contentInfo[0].attributeGroup[1].attribute[0].maxValue",      65535.0,
 -            "contentInfo[0].attributeGroup[2].attribute[0].maxValue",      65535.0,
 -            "contentInfo[0].attributeGroup[2].attribute[1].maxValue",      65535.0,
 -
 -            "contentInfo[0].attributeGroup[0].attribute[0].peakResponse",    433.0,
 -            "contentInfo[0].attributeGroup[0].attribute[1].peakResponse",    482.0,
 -            "contentInfo[0].attributeGroup[0].attribute[2].peakResponse",    562.0,
 -            "contentInfo[0].attributeGroup[0].attribute[3].peakResponse",    655.0,
 -            "contentInfo[0].attributeGroup[0].attribute[4].peakResponse",    865.0,
 -            "contentInfo[0].attributeGroup[0].attribute[5].peakResponse",   1610.0,
 -            "contentInfo[0].attributeGroup[0].attribute[6].peakResponse",   2200.0,
 -            "contentInfo[0].attributeGroup[0].attribute[7].peakResponse",   1375.0,
 -            "contentInfo[0].attributeGroup[1].attribute[0].peakResponse",    590.0,
 -            "contentInfo[0].attributeGroup[2].attribute[0].peakResponse",  10800.0,
 -            "contentInfo[0].attributeGroup[2].attribute[1].peakResponse",  12000.0,
 -
 -            "contentInfo[0].attributeGroup[0].attribute[0].transferFunctionType",  TransferFunctionType.LINEAR,
 -            "contentInfo[0].attributeGroup[0].attribute[1].transferFunctionType",  TransferFunctionType.LINEAR,
 -            "contentInfo[0].attributeGroup[0].attribute[2].transferFunctionType",  TransferFunctionType.LINEAR,
 -            "contentInfo[0].attributeGroup[0].attribute[3].transferFunctionType",  TransferFunctionType.LINEAR,
 -            "contentInfo[0].attributeGroup[0].attribute[4].transferFunctionType",  TransferFunctionType.LINEAR,
 -            "contentInfo[0].attributeGroup[0].attribute[5].transferFunctionType",  TransferFunctionType.LINEAR,
 -            "contentInfo[0].attributeGroup[0].attribute[6].transferFunctionType",  TransferFunctionType.LINEAR,
 -            "contentInfo[0].attributeGroup[0].attribute[7].transferFunctionType",  TransferFunctionType.LINEAR,
 -            "contentInfo[0].attributeGroup[1].attribute[0].transferFunctionType",  TransferFunctionType.LINEAR,
 -            "contentInfo[0].attributeGroup[2].attribute[0].transferFunctionType",  TransferFunctionType.LINEAR,
 -            "contentInfo[0].attributeGroup[2].attribute[1].transferFunctionType",  TransferFunctionType.LINEAR,
 -
 -            "contentInfo[0].attributeGroup[0].attribute[0].scaleFactor",  0.0127,
 -            "contentInfo[0].attributeGroup[0].attribute[1].scaleFactor",  0.013,
 -            "contentInfo[0].attributeGroup[0].attribute[2].scaleFactor",  0.012,
 -            "contentInfo[0].attributeGroup[0].attribute[3].scaleFactor",  0.0101,
 -            "contentInfo[0].attributeGroup[0].attribute[4].scaleFactor",  0.00619,
 -            "contentInfo[0].attributeGroup[0].attribute[5].scaleFactor",  0.00154,
 -            "contentInfo[0].attributeGroup[0].attribute[6].scaleFactor",  0.000519,
 -            "contentInfo[0].attributeGroup[0].attribute[7].scaleFactor",  0.00242,
 -            "contentInfo[0].attributeGroup[1].attribute[0].scaleFactor",  0.0115,
 -            "contentInfo[0].attributeGroup[2].attribute[0].scaleFactor",  0.000334,
 -            "contentInfo[0].attributeGroup[2].attribute[1].scaleFactor",  0.000334,
 -
 -            "contentInfo[0].attributeGroup[0].attribute[0].offset",       -63.6,
 -            "contentInfo[0].attributeGroup[0].attribute[1].offset",       -65.1,
 -            "contentInfo[0].attributeGroup[0].attribute[2].offset",       -60.0,
 -            "contentInfo[0].attributeGroup[0].attribute[3].offset",       -50.6,
 -            "contentInfo[0].attributeGroup[0].attribute[4].offset",       -31.0,
 -            "contentInfo[0].attributeGroup[0].attribute[5].offset",       -7.7,
 -            "contentInfo[0].attributeGroup[0].attribute[6].offset",       -2.6,
 -            "contentInfo[0].attributeGroup[0].attribute[7].offset",       -12.1,
 -            "contentInfo[0].attributeGroup[1].attribute[0].offset",       -57.3,
 -            "contentInfo[0].attributeGroup[2].attribute[0].offset",       0.1,
 -            "contentInfo[0].attributeGroup[2].attribute[1].offset",       0.1,
 -
 -            "contentInfo[0].attributeGroup[0].attribute[0].boundUnits",   "nm",
 -            "contentInfo[0].attributeGroup[0].attribute[1].boundUnits",   "nm",
 -            "contentInfo[0].attributeGroup[0].attribute[2].boundUnits",   "nm",
 -            "contentInfo[0].attributeGroup[0].attribute[3].boundUnits",   "nm",
 -            "contentInfo[0].attributeGroup[0].attribute[4].boundUnits",   "nm",
 -            "contentInfo[0].attributeGroup[0].attribute[5].boundUnits",   "nm",
 -            "contentInfo[0].attributeGroup[0].attribute[6].boundUnits",   "nm",
 -            "contentInfo[0].attributeGroup[0].attribute[7].boundUnits",   "nm",
 -            "contentInfo[0].attributeGroup[1].attribute[0].boundUnits",   "nm",
 -            "contentInfo[0].attributeGroup[2].attribute[0].boundUnits",   "nm",
 -            "contentInfo[0].attributeGroup[2].attribute[1].boundUnits",   "nm",
 -
 -            "contentInfo[0].attributeGroup[0].contentType[0]", CoverageContentType.PHYSICAL_MEASUREMENT,
 -            "contentInfo[0].attributeGroup[1].contentType[0]", CoverageContentType.PHYSICAL_MEASUREMENT,
 -            "contentInfo[0].attributeGroup[2].contentType[0]", CoverageContentType.PHYSICAL_MEASUREMENT,
 -
 -            "contentInfo[0].cloudCoverPercentage",         8.3,
 -            "contentInfo[0].illuminationAzimuthAngle",   116.9,
 -            "contentInfo[0].illuminationElevationAngle",  58.8,
 -
 -            "spatialRepresentationInfo[0].numberOfDimensions",                       2,
 -            "spatialRepresentationInfo[1].numberOfDimensions",                       2,
 -            "spatialRepresentationInfo[0].axisDimensionProperties[0].dimensionName", DimensionNameType.SAMPLE,
 -            "spatialRepresentationInfo[1].axisDimensionProperties[0].dimensionName", DimensionNameType.SAMPLE,
 -            "spatialRepresentationInfo[0].axisDimensionProperties[1].dimensionName", DimensionNameType.LINE,
 -            "spatialRepresentationInfo[1].axisDimensionProperties[1].dimensionName", DimensionNameType.LINE,
 -            "spatialRepresentationInfo[0].axisDimensionProperties[0].dimensionSize", 15000,
 -            "spatialRepresentationInfo[0].axisDimensionProperties[1].dimensionSize", 15500,
 -            "spatialRepresentationInfo[1].axisDimensionProperties[0].dimensionSize", 7600,
 -            "spatialRepresentationInfo[1].axisDimensionProperties[1].dimensionSize", 7800,
 -            "spatialRepresentationInfo[0].transformationParameterAvailability",      false,
 -            "spatialRepresentationInfo[1].transformationParameterAvailability",      false,
 -            "spatialRepresentationInfo[0].checkPointAvailability",                   false,
 -            "spatialRepresentationInfo[1].checkPointAvailability",                   false,
 -
 -            "resourceLineage[0].source[0].description", "Pseudo GLS");
 +        final String text = formatNameAndValue(DefaultMetadata.castOrCopy(actual).asTreeTable());
 +        assertMultilinesEquals(
 +                "Metadata\n"
 +                + "  ├─Metadata identifier……………………………………………………………… LandsatTest\n"
 +                + "  ├─Metadata standard (1 of 2)…………………………………………… Geographic Information — Metadata Part 1: Fundamentals\n"
 +                + "  │   ├─Edition…………………………………………………………………………………… ISO 19115-1:2014(E)\n"
 +                + "  │   ├─Identifier…………………………………………………………………………… 19115-1\n"
 +                + "  │   ├─Cited responsible party\n"
 +                + "  │   │   ├─Role………………………………………………………………………………… Principal investigator\n"
 +                + "  │   │   └─Party……………………………………………………………………………… International Organization for Standardization\n"
 +                + "  │   └─Presentation form………………………………………………………… Document digital\n"
 +                + "  ├─Metadata standard (2 of 2)…………………………………………… Geographic Information — Metadata Part 2: Extensions for imagery and gridded data\n"
 +                + "  │   ├─Edition…………………………………………………………………………………… ISO 19115-2:2009(E)\n"
 +                + "  │   ├─Identifier…………………………………………………………………………… 19115-2\n"
 +                + "  │   ├─Cited responsible party\n"
 +                + "  │   │   ├─Role………………………………………………………………………………… Principal investigator\n"
 +                + "  │   │   └─Party……………………………………………………………………………… International Organization for Standardization\n"
 +                + "  │   └─Presentation form………………………………………………………… Document digital\n"
 +                + "  ├─Spatial representation info (1 of 2)\n"
 +                + "  │   ├─Number of dimensions………………………………………………… 2\n"
 +                + "  │   ├─Axis dimension properties (1 of 2)…………… Sample\n"
 +                + "  │   │   └─Dimension size……………………………………………………… 15000\n"
 +                + "  │   ├─Axis dimension properties (2 of 2)…………… Line\n"
 +                + "  │   │   └─Dimension size……………………………………………………… 15500\n"
 +                + "  │   ├─Transformation parameter availability…… false\n"
 +                + "  │   └─Check point availability……………………………………… false\n"
 +                + "  ├─Spatial representation info (2 of 2)\n"
 +                + "  │   ├─Number of dimensions………………………………………………… 2\n"
 +                + "  │   ├─Axis dimension properties (1 of 2)…………… Sample\n"
 +                + "  │   │   └─Dimension size……………………………………………………… 7600\n"
 +                + "  │   ├─Axis dimension properties (2 of 2)…………… Line\n"
 +                + "  │   │   └─Dimension size……………………………………………………… 7800\n"
 +                + "  │   ├─Transformation parameter availability…… false\n"
 +                + "  │   └─Check point availability……………………………………… false\n"
 +                + "  ├─Reference system info………………………………………………………… EPSG:WGS 84 / UTM zone 49N\n"
 +                + "  ├─Identification info\n"
 +                + "  │   ├─Citation………………………………………………………………………………… LandsatTest\n"
 +                + "  │   │   └─Date………………………………………………………………………………… 2016-06-27 16:48:12\n"
 +                + "  │   │       └─Date type………………………………………………………… Creation\n"
 +                + "  │   ├─Credit……………………………………………………………………………………… Derived from U.S. Geological Survey data\n"
 +                + "  │   ├─Spatial resolution (1 of 2)\n"
 +                + "  │   │   └─Distance……………………………………………………………………… 15.0\n"
 +                + "  │   ├─Spatial resolution (2 of 2)\n"
 +                + "  │   │   └─Distance……………………………………………………………………… 30.0\n"
++                + "  │   ├─Topic category………………………………………………………………… Geoscientific information\n"
 +                + "  │   ├─Extent\n"
 +                + "  │   │   └─Geographic element\n"
 +                + "  │   │       ├─West bound longitude…………………………… 108°20′24″E\n"
 +                + "  │   │       ├─East bound longitude…………………………… 110°26′24″E\n"
 +                + "  │   │       ├─South bound latitude…………………………… 10°30′N\n"
 +                + "  │   │       ├─North bound latitude…………………………… 12°37′12″N\n"
 +                + "  │   │       └─Extent type code……………………………………… true\n"
 +                + "  │   └─Resource format\n"
 +                + "  │       └─Format specification citation……………… GeoTIFF Coverage Encoding Profile\n"
 +                + "  │           └─Alternate title………………………………………… GeoTIFF\n"
 +                + "  ├─Content info\n"
++                + "  │   ├─Processing level code……………………………………………… Pseudo LT1\n"
++                + "  │   │   ├─Authority…………………………………………………………………… Landsat\n"
++                + "  │   │   └─Code space………………………………………………………………… Landsat\n"
 +                + "  │   ├─Attribute group (1 of 3)\n"
 +                + "  │   │   ├─Content type…………………………………………………………… Physical measurement\n"
 +                + "  │   │   ├─Attribute (1 of 8)\n"
 +                + "  │   │   │   ├─Description…………………………………………………… Coastal Aerosol\n"
 +                + "  │   │   │   ├─Name……………………………………………………………………… TestImage_B1.TIF\n"
 +                + "  │   │   │   ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │   │   ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │   │   ├─Scale factor………………………………………………… 0.0127\n"
 +                + "  │   │   │   ├─Offset………………………………………………………………… -63.6\n"
 +                + "  │   │   │   ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │   │   ├─Peak response……………………………………………… 433.0\n"
 +                + "  │   │   │   └─Transfer function type……………………… Linear\n"
 +                + "  │   │   ├─Attribute (2 of 8)\n"
 +                + "  │   │   │   ├─Description…………………………………………………… Blue\n"
 +                + "  │   │   │   ├─Name……………………………………………………………………… TestImage_B2.TIF\n"
 +                + "  │   │   │   ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │   │   ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │   │   ├─Scale factor………………………………………………… 0.013\n"
 +                + "  │   │   │   ├─Offset………………………………………………………………… -65.1\n"
 +                + "  │   │   │   ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │   │   ├─Peak response……………………………………………… 482.0\n"
 +                + "  │   │   │   └─Transfer function type……………………… Linear\n"
 +                + "  │   │   ├─Attribute (3 of 8)\n"
 +                + "  │   │   │   ├─Description…………………………………………………… Green\n"
 +                + "  │   │   │   ├─Name……………………………………………………………………… TestImage_B3.TIF\n"
 +                + "  │   │   │   ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │   │   ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │   │   ├─Scale factor………………………………………………… 0.012\n"
 +                + "  │   │   │   ├─Offset………………………………………………………………… -60.0\n"
 +                + "  │   │   │   ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │   │   ├─Peak response……………………………………………… 562.0\n"
 +                + "  │   │   │   └─Transfer function type……………………… Linear\n"
 +                + "  │   │   ├─Attribute (4 of 8)\n"
 +                + "  │   │   │   ├─Description…………………………………………………… Red\n"
 +                + "  │   │   │   ├─Name……………………………………………………………………… TestImage_B4.TIF\n"
 +                + "  │   │   │   ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │   │   ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │   │   ├─Scale factor………………………………………………… 0.0101\n"
 +                + "  │   │   │   ├─Offset………………………………………………………………… -50.6\n"
 +                + "  │   │   │   ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │   │   ├─Peak response……………………………………………… 655.0\n"
 +                + "  │   │   │   └─Transfer function type……………………… Linear\n"
 +                + "  │   │   ├─Attribute (5 of 8)\n"
 +                + "  │   │   │   ├─Description…………………………………………………… Near-Infrared\n"
 +                + "  │   │   │   ├─Name……………………………………………………………………… TestImage_B5.TIF\n"
 +                + "  │   │   │   ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │   │   ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │   │   ├─Scale factor………………………………………………… 0.00619\n"
 +                + "  │   │   │   ├─Offset………………………………………………………………… -31.0\n"
 +                + "  │   │   │   ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │   │   ├─Peak response……………………………………………… 865.0\n"
 +                + "  │   │   │   └─Transfer function type……………………… Linear\n"
 +                + "  │   │   ├─Attribute (6 of 8)\n"
 +                + "  │   │   │   ├─Description…………………………………………………… Short Wavelength Infrared (SWIR) 1\n"
 +                + "  │   │   │   ├─Name……………………………………………………………………… TestImage_B6.TIF\n"
 +                + "  │   │   │   ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │   │   ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │   │   ├─Scale factor………………………………………………… 0.00154\n"
 +                + "  │   │   │   ├─Offset………………………………………………………………… -7.7\n"
 +                + "  │   │   │   ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │   │   ├─Peak response……………………………………………… 1610.0\n"
 +                + "  │   │   │   └─Transfer function type……………………… Linear\n"
 +                + "  │   │   ├─Attribute (7 of 8)\n"
 +                + "  │   │   │   ├─Description…………………………………………………… Short Wavelength Infrared (SWIR) 2\n"
 +                + "  │   │   │   ├─Name……………………………………………………………………… TestImage_B7.TIF\n"
 +                + "  │   │   │   ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │   │   ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │   │   ├─Scale factor………………………………………………… 5.19E-4\n"
 +                + "  │   │   │   ├─Offset………………………………………………………………… -2.6\n"
 +                + "  │   │   │   ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │   │   ├─Peak response……………………………………………… 2200.0\n"
 +                + "  │   │   │   └─Transfer function type……………………… Linear\n"
 +                + "  │   │   └─Attribute (8 of 8)\n"
 +                + "  │   │       ├─Description…………………………………………………… Cirrus\n"
 +                + "  │   │       ├─Name……………………………………………………………………… TestImage_B9.TIF\n"
 +                + "  │   │       ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │       ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │       ├─Scale factor………………………………………………… 0.00242\n"
 +                + "  │   │       ├─Offset………………………………………………………………… -12.1\n"
 +                + "  │   │       ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │       ├─Peak response……………………………………………… 1375.0\n"
 +                + "  │   │       └─Transfer function type……………………… Linear\n"
 +                + "  │   ├─Attribute group (2 of 3)\n"
 +                + "  │   │   ├─Content type…………………………………………………………… Physical measurement\n"
 +                + "  │   │   └─Attribute\n"
 +                + "  │   │       ├─Description…………………………………………………… Panchromatic\n"
 +                + "  │   │       ├─Name……………………………………………………………………… TestImage_B8.TIF\n"
 +                + "  │   │       ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │       ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │       ├─Scale factor………………………………………………… 0.0115\n"
 +                + "  │   │       ├─Offset………………………………………………………………… -57.3\n"
 +                + "  │   │       ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │       ├─Peak response……………………………………………… 590.0\n"
 +                + "  │   │       └─Transfer function type……………………… Linear\n"
 +                + "  │   ├─Attribute group (3 of 3)\n"
 +                + "  │   │   ├─Content type…………………………………………………………… Physical measurement\n"
 +                + "  │   │   ├─Attribute (1 of 2)\n"
 +                + "  │   │   │   ├─Description…………………………………………………… Thermal Infrared Sensor (TIRS) 1\n"
 +                + "  │   │   │   ├─Name……………………………………………………………………… TestImage_B10.TIF\n"
 +                + "  │   │   │   ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │   │   ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │   │   ├─Scale factor………………………………………………… 3.34E-4\n"
 +                + "  │   │   │   ├─Offset………………………………………………………………… 0.1\n"
 +                + "  │   │   │   ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │   │   ├─Peak response……………………………………………… 10800.0\n"
 +                + "  │   │   │   └─Transfer function type……………………… Linear\n"
 +                + "  │   │   └─Attribute (2 of 2)\n"
 +                + "  │   │       ├─Description…………………………………………………… Thermal Infrared Sensor (TIRS) 2\n"
 +                + "  │   │       ├─Name……………………………………………………………………… TestImage_B11.TIF\n"
 +                + "  │   │       ├─Max value………………………………………………………… 65535.0\n"
 +                + "  │   │       ├─Min value………………………………………………………… 1.0\n"
 +                + "  │   │       ├─Scale factor………………………………………………… 3.34E-4\n"
 +                + "  │   │       ├─Offset………………………………………………………………… 0.1\n"
 +                + "  │   │       ├─Bound units…………………………………………………… nm\n"
 +                + "  │   │       ├─Peak response……………………………………………… 12000.0\n"
 +                + "  │   │       └─Transfer function type……………………… Linear\n"
 +                + "  │   ├─Illumination elevation angle…………………………… 58.8\n"
 +                + "  │   ├─Illumination azimuth angle………………………………… 116.9\n"
 +                + "  │   └─Cloud cover percentage…………………………………………… 8.3\n"
++                + "  ├─Resource lineage\n"
++                + "  │   └─Source……………………………………………………………………………………… Pseudo GLS\n"
 +                + "  ├─Metadata scope\n"
 +                + "  │   └─Resource scope………………………………………………………………… COVERAGE\n"
 +                + "  ├─Acquisition information\n"
 +                + "  │   ├─Acquisition requirement\n"
 +                + "  │   │   └─Identifier………………………………………………………………… Software unit tests\n"
 +                + "  │   ├─Operation\n"
 +                + "  │   │   ├─Status…………………………………………………………………………… Completed\n"
 +                + "  │   │   ├─Type………………………………………………………………………………… Real\n"
 +                + "  │   │   └─Significant event\n"
 +                + "  │   │       ├─Context……………………………………………………………… Acquisition\n"
 +                + "  │   │       └─Time……………………………………………………………………… 2016-06-26 03:02:01\n"
 +                + "  │   └─Platform\n"
 +                + "  │       ├─Identifier………………………………………………………………… Pseudo LANDSAT\n"
 +                + "  │       └─Instrument\n"
 +                + "  │           └─Identifier……………………………………………………… Pseudo TIRS\n"
 +                + "  ├─Date info………………………………………………………………………………………… 2016-06-27 16:48:12\n"
 +                + "  │   └─Date type……………………………………………………………………………… Creation\n"
 +                + "  └─Default locale+other locale………………………………………… en\n", text);
      }
  }
diff --cc storage/sis-netcdf/src/main/java/org/apache/sis/storage/netcdf/MetadataReader.java
index 1aacd21,44ce727..f07446f
--- a/storage/sis-netcdf/src/main/java/org/apache/sis/storage/netcdf/MetadataReader.java
+++ b/storage/sis-netcdf/src/main/java/org/apache/sis/storage/netcdf/MetadataReader.java
@@@ -49,8 -49,6 +49,7 @@@ import org.opengis.referencing.cs.AxisD
  import org.opengis.referencing.crs.VerticalCRS;
  
  import org.apache.sis.util.iso.Types;
 +import org.apache.sis.util.iso.DefaultNameFactory;
- import org.apache.sis.util.iso.SimpleInternationalString;
  import org.apache.sis.util.logging.WarningListeners;
  import org.apache.sis.storage.DataStore;
  import org.apache.sis.storage.DataStoreException;
diff --cc storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java
index 0000000,57e9c1f..cc64a80
mode 000000,100644..100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Features.java
@@@ -1,0 -1,499 +1,499 @@@
+ /*
+  * Licensed to the Apache Software Foundation (ASF) under one or more
+  * contributor license agreements.  See the NOTICE file distributed with
+  * this work for additional information regarding copyright ownership.
+  * The ASF licenses this file to You under the Apache License, Version 2.0
+  * (the "License"); you may not use this file except in compliance with
+  * the License.  You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+ package org.apache.sis.internal.sql.feature;
+ 
+ import java.util.List;
+ import java.util.ArrayList;
+ import java.util.Map;
+ import java.util.HashMap;
+ import java.util.Collection;
+ import java.util.Spliterator;
+ import java.util.function.Consumer;
+ import java.sql.Connection;
+ import java.sql.DatabaseMetaData;
+ import java.sql.Statement;
+ import java.sql.PreparedStatement;
+ import java.sql.ResultSet;
+ import java.sql.SQLException;
+ import java.lang.reflect.Array;
+ import org.apache.sis.internal.metadata.sql.SQLBuilder;
+ import org.apache.sis.storage.InternalDataStoreException;
+ import org.apache.sis.util.collection.BackingStoreException;
+ import org.apache.sis.util.collection.WeakValueHashMap;
+ import org.apache.sis.util.ArraysExt;
+ 
+ // Branch-dependent imports
 -import org.opengis.feature.Feature;
 -import org.opengis.feature.FeatureType;
++import org.apache.sis.feature.AbstractFeature;
++import org.apache.sis.feature.DefaultFeatureType;
+ 
+ 
+ /**
+  * Iterator over feature instances.
+  *
+  * @author  Martin Desruisseaux (Geomatys)
+  * @version 1.0
+  * @since   1.0
+  * @module
+  */
 -final class Features implements Spliterator<Feature>, Runnable {
++final class Features implements Spliterator<AbstractFeature>, Runnable {
+     /**
+      * An empty array of iterators, used when there is no dependency.
+      */
+     private static final Features[] EMPTY = new Features[0];
+ 
+     /**
+      * The type of features to create.
+      */
 -    private final FeatureType featureType;
++    private final DefaultFeatureType featureType;
+ 
+     /**
+      * Name of attributes in feature instances, excluding operations and associations to other tables.
+      * Those names are in the order of columns declared in the {@code SELECT <columns} statement.
+      * This array is a shared instance and shall not be modified.
+      */
+     private final String[] attributeNames;
+ 
+     /**
+      * Name of the properties where are stored associations in feature instances.
+      * The length of this array shall be equal to the {@link #dependencies} array length.
+      * Imported or exported features read by {@code dependencies[i]} will be stored in
+      * the association named {@code associationNames[i]}.
+      */
+     private final String[] associationNames;
+ 
+     /**
+      * Name of the property where to store the association that we can not handle with other {@link #dependencies}.
+      * This deferred association may exist because of circular dependency.
+      */
+     private final String deferredAssociation;
+ 
+     /**
+      * The feature sets referenced through foreigner keys, or {@link #EMPTY} if none.
+      * This includes the associations inferred from both the imported and exported keys.
+      * The first {@link #importCount} iterators are for imported keys, and the remaining
+      * iterators are for the exported keys.
+      */
+     private final Features[] dependencies;
+ 
+     /**
+      * Number of entries in {@link #dependencies} for {@link Relation.Direction#IMPORT}.
+      * The entries immediately following the first {@code importCount} entries are for
+      * {@link Relation.Direction#EXPORT}.
+      */
+     private final int importCount;
+ 
+     /**
+      * One-based indices of the columns to query for each {@link #dependencies} entry.
+      */
+     private final int[][] foreignerKeyIndices;
+ 
+     /**
+      * If this iterator returns only the feature matching some condition (typically a primary key value),
+      * the statement for performing that filtering. Otherwise if this iterator returns all features, then
+      * this field is {@code null}.
+      */
+     private final PreparedStatement statement;
+ 
+     /**
+      * The result of executing the SQL query for a {@link Table}. If {@link #statement} is null,
+      * then a single {@code ResultSet} is used for all the lifetime of this {@code Features} instance.
+      * Otherwise an arbitrary amount of {@code ResultSet}s may be created from the statement.
+      */
+     private ResultSet result;
+ 
+     /**
+      * Feature instances already created, or {@code null} if the features created by this iterator are not cached.
+      * This map is used when requesting a feature by identifier, not when iterating over all features (note: we
+      * could perform an opportunistic check in a future SIS version). The same map may be shared by all iterators
+      * on the same {@link Table}, but {@link WeakValueHashMap} already provides the required synchronizations.
+      *
+      * <p>This {@code Features} class does not require the identifiers to be built from primary key columns.
+      * However if this map has been provided by {@link Table#instanceForPrimaryKeys()}, then the identifiers
+      * need to be primary keys with columns in the exact same order for allowing the same map to be shared.</p>
+      */
+     private final WeakValueHashMap<?,Object> instances;
+ 
+     /**
+      * The component class of the keys in the {@link #instances} map, or {@code null} if the keys are not array.
+      * For example if a primary key is made of two columns of type {@code String}, then this field may be set to
+      * {@code String}.
+      */
+     private final Class<?> keyComponentClass;
+ 
+     /**
+      * Estimated number of rows, or {@literal <= 0} if unknown.
+      */
+     private final long estimatedSize;
+ 
+     /**
+      * Creates a new iterator over the feature instances.
+      *
+      * @param table             the table for which we are creating an iterator.
+      * @param connection        connection to the database.
+      * @param attributeNames    value of {@link Table#attributeNames}:   where to store simple values.
+      * @param attributeColumns  value of {@link Table#attributeColumns}: often the same as attribute names.
+      * @param importedKeys      value of {@link Table#importedKeys}:     targets of this table foreign keys.
+      * @param exportedKeys      value of {@link Table#exportedKeys}:     foreigner keys of other tables.
+      * @param following         the relations that we are following. Used for avoiding never ending loop.
+      * @param noFollow          relation to not follow, or {@code null} if none.
+      */
+     Features(final Table table, final Connection connection, final String[] attributeNames, final String[] attributeColumns,
+              final Relation[] importedKeys, final Relation[] exportedKeys, final List<Relation> following, final Relation noFollow)
+              throws SQLException, InternalDataStoreException
+     {
+         this.featureType = table.featureType;
+         this.attributeNames = attributeNames;
+         final DatabaseMetaData metadata = connection.getMetaData();
+         estimatedSize = following.isEmpty() ? table.countRows(metadata, true) : 0;
+         final SQLBuilder sql = new SQLBuilder(metadata, true).append("SELECT");
+         final Map<String,Integer> columnIndices = new HashMap<>();
+         /*
+          * Create a SELECT clause with all columns that are ordinary attributes.
+          * Order matter, since 'Features' iterator will map the columns to the
+          * attributes listed in the 'attributeNames' array in that order.
+          */
+         for (String column : attributeColumns) {
+             appendColumn(sql, column, columnIndices);
+         }
+         /*
+          * Collect information about associations in local arrays before to assign
+          * them to the final fields, because some array lengths may be adjusted.
+          */
+         int importCount = (importedKeys != null) ? importedKeys.length : 0;
+         int exportCount = (exportedKeys != null) ? exportedKeys.length : 0;
+         int totalCount  = importCount + exportCount;
+         if (totalCount == 0) {
+             dependencies        = EMPTY;
+             associationNames    = null;
+             foreignerKeyIndices = null;
+             deferredAssociation = null;
+         } else {
+             String deferredAssociation = null;
+             final Features[]     dependencies = new Features[totalCount];
+             final String[]   associationNames = new String  [totalCount];
+             final int[][] foreignerKeyIndices = new int     [totalCount][];
+             /*
+              * For each foreigner key to another table, append all columns of that foreigner key
+              * and the name of the single feature property where the association will be stored.
+              */
+             if (importCount != 0) {
+                 importCount = 0;                                                    // We will recount.
+                 for (final Relation dependency : importedKeys) {
+                     if (dependency != noFollow) {
+                         dependency.startFollowing(following);                       // Safety against never-ending recursivity.
+                         associationNames   [importCount] = dependency.propertyName;
+                         foreignerKeyIndices[importCount] = getColumnIndices(sql, dependency.getForeignerKeys(), columnIndices);
+                         dependencies       [importCount] = dependency.getSearchTable().features(connection, following, noFollow);
+                         dependency.endFollowing(following);
+                         importCount++;
+                     } else {
+                         deferredAssociation = dependency.propertyName;
+                     }
+                 }
+             }
+             /*
+              * Create iterators for other tables that reference the primary keys of this table. For example
+              * if we have a "City" feature with attributes for the city name, population, etc. and a "Parks"
+              * feature referencing the city where the park is located, in order to populate the "City.parks"
+              * associations we need to iterate over all "Parks" rows referencing the city.
+              */
+             if (exportCount != 0) {
+                 int i = importCount;
+                 for (final Relation dependency : exportedKeys) {
+                     dependency.startFollowing(following);                   // Safety against never-ending recursivity.
+                     final Table foreigner  = dependency.getSearchTable();
+                     final Relation inverse = foreigner.getInverseOf(dependency, table.name);
+                     associationNames   [i] = dependency.propertyName;
+                     foreignerKeyIndices[i] = getColumnIndices(sql, dependency.getForeignerKeys(), columnIndices);
+                     dependencies       [i] = foreigner.features(connection, following, inverse);
+                     dependency.endFollowing(following);
+                     i++;
+                 }
+             }
+             totalCount = importCount + exportCount;
+             this.dependencies        = ArraysExt.resize(dependencies,        totalCount);
+             this.associationNames    = ArraysExt.resize(associationNames,    totalCount);
+             this.foreignerKeyIndices = ArraysExt.resize(foreignerKeyIndices, totalCount);
+             this.deferredAssociation = deferredAssociation;
+         }
+         this.importCount = importCount;
+         /*
+          * Create a Statement if we don't need any condition, or a PreparedStatement if we need to add
+          * a "WHERE" clause. In the later case, we will cache the features already created if there is
+          * a possibility that many rows reference the same feature instance.
+          */
+         sql.append(" FROM ").appendIdentifier(table.name.catalog, table.name.schema, table.name.table);
+         if (following.isEmpty()) {
+             statement = null;
+             instances = null;       // A future SIS version could use the map opportunistically if it exists.
+             keyComponentClass = null;
+             result = connection.createStatement().executeQuery(sql.toString());
+         } else {
+             final Relation componentOf = following.get(following.size() - 1);
+             String separator = " WHERE ";
+             for (String primaryKey : componentOf.getSearchColumns()) {
+                 sql.append(separator).appendIdentifier(primaryKey).append("=?");
+                 separator = " AND ";
+             }
+             statement = connection.prepareStatement(sql.toString());
+             /*
+              * Following assumes that the foreigner key references the primary key of this table,
+              * in which case 'table.primaryKeyClass' should never be null. This assumption may not
+              * hold if the relation has been defined by DatabaseMetaData.getCrossReference(…) instead.
+              */
+             if (componentOf.useFullKey()) {
+                 instances = table.instanceForPrimaryKeys();
+                 keyComponentClass = table.primaryKeyClass.getComponentType();
+             } else {
+                 instances = new WeakValueHashMap<>(Object.class);       // Can not share the table cache.
+                 keyComponentClass = Object.class;
+             }
+         }
+     }
+ 
+     /**
+      * Appends a columns in the given builder and remember the column indices.
+      * An exception is thrown if the column has already been added (should never happen).
+      */
+     private static int appendColumn(final SQLBuilder sql, final String column,
+             final Map<String,Integer> columnIndices) throws InternalDataStoreException
+     {
+         int columnCount = columnIndices.size();
+         if (columnCount != 0) sql.append(',');
+         sql.append(' ').appendIdentifier(column);
+         if (columnIndices.put(column, ++columnCount) == null) return columnCount;
+         throw new InternalDataStoreException(Resources.format(Resources.Keys.DuplicatedColumn_1, column));
+     }
+ 
+     /**
+      * Computes the 1-based indices of given columns, adding the columns in the given builder if necessary.
+      */
+     private static int[] getColumnIndices(final SQLBuilder sql, final Collection<String> columns,
+             final Map<String,Integer> columnIndices) throws InternalDataStoreException
+     {
+         int i = 0;
+         final int[] indices = new int[columns.size()];
+         for (final String column : columns) {
+             final Integer pos = columnIndices.get(column);
+             indices[i++] = (pos != null) ? pos : appendColumn(sql, column, columnIndices);
+         }
+         return indices;
+     }
+ 
+     /**
+      * Returns an array of the given length capable to hold the identifier,
+      * or {@code null} if there is no need for an array.
+      */
+     private Object identifierArray(final int columnCount) {
+         return (columnCount > 1) ? Array.newInstance(keyComponentClass, columnCount) : null;
+     }
+ 
+     /**
+      * Declares that this iterator never returns {@code null} elements.
+      */
+     @Override
+     public int characteristics() {
+         return NONNULL;
+     }
+ 
+     /**
+      * Returns the estimated number of features, or {@link Long#MAX_VALUE} if unknown.
+      */
+     @Override
+     public long estimateSize() {
+         return (estimatedSize > 0) ? estimatedSize : Long.MAX_VALUE;
+     }
+ 
+     /**
+      * Current version does not support split.
+      *
+      * @return always {@code null}.
+      */
+     @Override
 -    public Spliterator<Feature> trySplit() {
++    public Spliterator<AbstractFeature> trySplit() {
+         return null;
+     }
+ 
+     /**
+      * Gives the next feature to the given consumer.
+      */
+     @Override
 -    public boolean tryAdvance(final Consumer<? super Feature> action) {
++    public boolean tryAdvance(final Consumer<? super AbstractFeature> action) {
+         try {
+             return fetch(action, false);
+         } catch (SQLException e) {
+             throw new BackingStoreException(e);
+         }
+     }
+ 
+     /**
+      * Gives all remaining features to the given consumer.
+      */
+     @Override
 -    public void forEachRemaining(final Consumer<? super Feature> action) {
++    public void forEachRemaining(final Consumer<? super AbstractFeature> action) {
+         try {
+             fetch(action, true);
+         } catch (SQLException e) {
+             throw new BackingStoreException(e);
+         }
+     }
+ 
+     /**
+      * Gives at least the next feature to the given consumer.
+      * Gives all remaining features if {@code all} is {@code true}.
+      *
 -     * @param  action  the action to execute for each {@link Feature} instances fetched by this method.
++     * @param  action  the action to execute for each {@link AbstractFeature} instance fetched by this method.
+      * @param  all     {@code true} for reading all remaining feature instances, or {@code false} for only the next one.
+      * @return {@code true} if we have read an instance and {@code all} is {@code false} (so there may be other instances).
+      */
 -    private boolean fetch(final Consumer<? super Feature> action, final boolean all) throws SQLException {
++    private boolean fetch(final Consumer<? super AbstractFeature> action, final boolean all) throws SQLException {
+         while (result.next()) {
 -            final Feature feature = featureType.newInstance();
++            final AbstractFeature feature = featureType.newInstance();
+             for (int i=0; i < attributeNames.length; i++) {
+                 final Object value = result.getObject(i+1);
+                 if (!result.wasNull()) {
+                     feature.setPropertyValue(attributeNames[i], value);
+                 }
+             }
+             for (int i=0; i < dependencies.length; i++) {
+                 final Features dependency = dependencies[i];
+                 final int[] columnIndices = foreignerKeyIndices[i];
+                 final Object value;
+                 if (i < importCount) {
+                     /*
+                      * Relation.Direction.IMPORT: this table contains the foreigner keys.
+                      *
+                      * If the foreigner key uses only one column, we will store the foreigner key value
+                      * in the 'key' variable without creating an array. But if the foreigner key uses
+                      * more than one column, then we need to create an array holding all values.
+                      */
+                     Object key = null;
+                     final Object keys = dependency.identifierArray(columnIndices.length);
+                     for (int p=0; p < columnIndices.length;) {
+                         key = result.getObject(columnIndices[p]);
+                         if (keys != null) Array.set(keys, p, key);
+                         dependency.statement.setObject(++p, key);
+                     }
+                     if (keys != null) key = keys;
+                     value = dependency.fetchReferenced(key, null);
+                 } else {
+                     /*
+                      * Relation.Direction.EXPORT: another table references this table.
+                      *
+                      * 'key' must stay null because we do not cache those dependencies.
+                      * The reason is that this direction can return a lot of instances,
+                      * contrary to Direction.IMPORT which returns only one instance.
+                      * Furthermore, instances fetched from Direction.EXPORT can not be
+                      * shared by feature instances, so caching would be useless here.
+                      */
+                     for (int p=0; p < columnIndices.length;) {
+                         final Object k = result.getObject(columnIndices[p]);
+                         dependency.statement.setObject(++p, k);
+                     }
+                     value = dependency.fetchReferenced(null, feature);
+                 }
+                 feature.setPropertyValue(associationNames[i], value);
+             }
+             action.accept(feature);
+             if (!all) return true;
+         }
+         return false;
+     }
+ 
+     /**
+      * Executes the current {@link #statement} and stores all features in a list.
+      * Returns {@code null} if there is no feature, or returns the feature instance
+      * if there is only one such instance, or returns a list of features otherwise.
+      *
+      * @param  key    the key to use for referencing the feature in the cache, or {@code null} for no caching.
+      * @param  owner  if the features to fetch are components of another feature, that container feature instance.
+      * @return the feature as a singleton {@code Feature} or as a {@code Collection<Feature>}.
+      */
 -    private Object fetchReferenced(final Object key, final Feature owner) throws SQLException {
++    private Object fetchReferenced(final Object key, final AbstractFeature owner) throws SQLException {
+         if (key != null) {
+             Object existing = instances.get(key);
+             if (existing != null) {
+                 return existing;
+             }
+         }
 -        final List<Feature> features = new ArrayList<>();
++        final List<AbstractFeature> features = new ArrayList<>();
+         try (ResultSet r = statement.executeQuery()) {
+             result = r;
+             fetch(features::add, true);
+         } finally {
+             result = null;
+         }
+         if (owner != null && deferredAssociation != null) {
 -            for (final Feature feature : features) {
++            for (final AbstractFeature feature : features) {
+                 feature.setPropertyValue(deferredAssociation, owner);
+             }
+         }
+         Object feature;
+         switch (features.size()) {
+             case 0:  feature = null; break;
+             case 1:  feature = features.get(0); break;
+             default: feature = features; break;
+         }
+         if (key != null) {
+             @SuppressWarnings("unchecked")          // Check is performed by putIfAbsent(…).
+             final Object previous = ((WeakValueHashMap) instances).putIfAbsent(key, feature);
+             if (previous != null) {
+                 feature = previous;
+             }
+         }
+         return feature;
+     }
+ 
+     /**
+      * Closes the (pooled) connection, including the statements of all dependencies.
+      */
+     private void close() throws SQLException {
+         /*
+          * Only one of 'statement' and 'result' should be non-null. The connection should be closed
+          * by the 'Features' instance having a non-null 'result' because it is the main one created
+          * by 'Table.features(boolean)' method. The other 'Features' instances are dependencies.
+          */
+         if (statement != null) {
+             statement.close();
+         }
+         final ResultSet r = result;
+         if (r != null) {
+             result = null;
+             final Statement s = r.getStatement();
+             try (Connection c = s.getConnection()) {
+                 r.close();      // Implied by s.close() according JDBC javadoc, but we are paranoiac.
+                 s.close();
+                 for (final Features dependency : dependencies) {
+                     dependency.close();
+                 }
+             }
+         }
+     }
+ 
+     /**
+      * Closes the (pooled) connection, including the statements of all dependencies.
+      * This is a handler to be invoked by {@link java.util.stream.Stream#close()}.
+      */
+     @Override
+     public void run() {
+         try {
+             close();
+         } catch (SQLException e) {
+             throw new BackingStoreException(e);
+         }
+     }
+ }
diff --cc storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java
index 0000000,70a6b73..b7fe0e0
mode 000000,100644..100644
--- a/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java
+++ b/storage/sis-sqlstore/src/main/java/org/apache/sis/internal/sql/feature/Table.java
@@@ -1,0 -1,625 +1,624 @@@
+ /*
+  * Licensed to the Apache Software Foundation (ASF) under one or more
+  * contributor license agreements.  See the NOTICE file distributed with
+  * this work for additional information regarding copyright ownership.
+  * The ASF licenses this file to You under the Apache License, Version 2.0
+  * (the "License"); you may not use this file except in compliance with
+  * the License.  You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+ package org.apache.sis.internal.sql.feature;
+ 
+ import java.util.Map;
+ import java.util.HashMap;
+ import java.util.LinkedHashMap;
+ import java.util.List;
+ import java.util.ArrayList;
+ import java.util.Collection;
+ import java.util.stream.Stream;
+ import java.util.stream.StreamSupport;
+ import java.sql.DatabaseMetaData;
+ import java.sql.Connection;
+ import java.sql.ResultSet;
+ import java.sql.SQLException;
+ import javax.sql.DataSource;
+ import org.opengis.util.GenericName;
+ import org.opengis.referencing.crs.CoordinateReferenceSystem;
+ import org.apache.sis.feature.builder.AttributeRole;
+ import org.apache.sis.feature.builder.AttributeTypeBuilder;
+ import org.apache.sis.feature.builder.AssociationRoleBuilder;
+ import org.apache.sis.feature.builder.FeatureTypeBuilder;
+ import org.apache.sis.internal.feature.Geometries;
+ import org.apache.sis.storage.DataStoreException;
+ import org.apache.sis.storage.DataStoreContentException;
+ import org.apache.sis.storage.InternalDataStoreException;
+ import org.apache.sis.internal.metadata.sql.Reflection;
+ import org.apache.sis.internal.metadata.sql.SQLUtilities;
+ import org.apache.sis.internal.storage.AbstractFeatureSet;
+ import org.apache.sis.internal.util.CollectionsExt;
+ import org.apache.sis.util.collection.WeakValueHashMap;
+ import org.apache.sis.util.collection.TreeTable;
+ import org.apache.sis.util.CharSequences;
+ import org.apache.sis.util.Exceptions;
+ import org.apache.sis.util.Classes;
+ import org.apache.sis.util.Numbers;
+ import org.apache.sis.util.Debug;
+ 
+ // Branch-dependent imports
 -import org.opengis.feature.Feature;
 -import org.opengis.feature.FeatureType;
 -import org.opengis.feature.AttributeType;
 -import org.opengis.feature.FeatureAssociationRole;
++import org.apache.sis.feature.AbstractFeature;
++import org.apache.sis.feature.DefaultFeatureType;
++import org.apache.sis.feature.DefaultAssociationRole;
+ 
+ 
+ /**
+  * Description of a table in the database, including columns, primary keys and foreigner keys.
 - * This class contains a {@link FeatureType} inferred from the table structure. The {@link FeatureType}
 - * contains an {@link AttributeType} for each table column, except foreigner keys which are represented
++ * This class contains a {@code FeatureType} inferred from the table structure. The {@code FeatureType}
++ * contains an {@code AttributeType} for each table column, except foreigner keys which are represented
+  * by {@code FeatureAssociationRole}s.
+  *
+  * @author  Johann Sorel (Geomatys)
+  * @author  Martin Desruisseaux (Geomatys)
+  * @version 1.0
+  * @since   1.0
+  * @module
+  */
+ final class Table extends AbstractFeatureSet {
+     /**
+      * Provider of (pooled) connections to the database.
+      */
+     private final DataSource source;
+ 
+     /**
+      * The structure of this table represented as a feature. Each feature attribute is a table column,
+      * except synthetic attributes like "sis:identifier". The feature may also contain associations
+      * inferred from foreigner keys that are not immediately apparent in the table.
+      */
 -    final FeatureType featureType;
++    final DefaultFeatureType featureType;
+ 
+     /**
+      * The name in the database of this {@code Table} object, together with its schema and catalog.
+      */
+     final TableReference name;
+ 
+     /**
+      * Name of attributes in feature instances, excluding operations and associations to other tables.
+      * Those names are in the order of columns declared in the {@code SELECT <columns>} statement.
+      * This array shall not be modified after construction.
+      */
+     private final String[] attributeNames;
+ 
+     /**
+      * Name of columns corresponding to each {@link #attributeNames}. This is often a reference to the
+      * same array than {@link #attributeNames}, but may be different if some attributes have been renamed
+      * for avoiding name collisions.
+      */
+     private final String[] attributeColumns;
+ 
+     /**
+      * The columns that constitute the primary key, or {@code null} if there is no primary key.
+      */
+     private final String[] primaryKeys;
+ 
+     /**
+      * The primary keys of other tables that are referenced by this table foreign key columns.
+      * They are 0:1 relations. May be {@code null} if there are no imported keys.
+      */
+     private final Relation[] importedKeys;
+ 
+     /**
+      * The foreign keys of other tables that reference this table primary key columns.
+      * They are 0:N relations. May be {@code null} if there is no exported keys.
+      */
+     private final Relation[] exportedKeys;
+ 
+     /**
+      * The class of primary key values, or {@code null} if there is no primary keys.
+      * If the primary keys use more than one column, then this field is the class of
+      * an array; it may be an array of primitive type.
+      */
+     final Class<?> primaryKeyClass;
+ 
+     /**
+      * Feature instances already created for given primary keys. This map is used only when requesting feature
+      * instances by identifiers (not for iterating over all features) and those identifiers are primary keys.
+      * We create this map only for tables referenced by foreigner keys of other tables as enumerated by the
+      * {@link Relation.Direction#IMPORT} and {@link Relation.Direction#EXPORT} cases; not for arbitrary
+      * cross-reference cases. Values are usually {@code Feature} instances, but may also be {@code Collection<Feature>}.
+      *
+      * @see #instanceForPrimaryKeys()
+      */
+     private WeakValueHashMap<?,Object> instanceForPrimaryKeys;
+ 
+     /**
+      * {@code true} if this table contains at least one geometry column.
+      */
+     final boolean hasGeometry;
+ 
+     /**
+      * Creates a description of the table of the given name.
+      * The table is identified by {@code id}, which contains a (catalog, schema, name) tuple.
+      * The catalog and schema parts are optional and can be null, but the table is mandatory.
+      *
+      * @param  analyzer    helper functions, e.g. for converting SQL types to Java types.
+      * @param  id          the catalog, schema and table name of the table to analyze.
+      * @param  importedBy  if this table is imported by the foreigner keys of another table,
+      *                     the parent table. Otherwise {@code null}.
+      */
+     Table(final Analyzer analyzer, final TableReference id, final TableReference importedBy)
+             throws SQLException, DataStoreException
+     {
+         super(analyzer.listeners);
+         this.source = analyzer.source;
+         this.name   = id;
+         final String tableEsc  = analyzer.escape(id.table);
+         final String schemaEsc = analyzer.escape(id.schema);
+         /*
+          * Get a list of primary keys. We need to know them before creating the attributes,
+          * in order to detect which attributes are used as components of Feature identifiers.
+          * In the 'primaryKeys' map, the boolean tells whether the column uses auto-increment,
+          * with null value meaning that we don't know.
+          *
+          * Note: when a table contains no primary keys, we could still look for index columns
+          * with unique constraint using metadata.getIndexInfo(catalog, schema, table, true).
+          * We don't do that for now because of uncertainties (which index to use if there
+          * are many? If they are suitable as identifiers, why are they not primary keys?).
+          */
+         final Map<String,Boolean> primaryKeys = new LinkedHashMap<>();
+         try (ResultSet reflect = analyzer.metadata.getPrimaryKeys(id.catalog, id.schema, id.table)) {
+             while (reflect.next()) {
+                 primaryKeys.put(analyzer.getUniqueString(reflect, Reflection.COLUMN_NAME), null);
+                 // The actual Boolean value will be fetched in the loop on columns later.
+             }
+         }
+         this.primaryKeys = primaryKeys.isEmpty() ? null : primaryKeys.keySet().toArray(new String[primaryKeys.size()]);
+         /*
+          * Creates a list of associations between the table read by this method and other tables.
+          * The associations are defined by the foreigner keys referencing primary keys. Note that
+          * the table relations can be defined in both ways:  the foreigner keys of this table may
+          * be referencing the primary keys of other tables (Direction.IMPORT) or the primary keys
+          * of this table may be referenced by the foreigner keys of other tables (Direction.EXPORT).
+          * However in both case, we will translate that into associations from this table to the
+          * other tables. We can not rely on IMPORT versus EXPORT for determining the association
+          * navigability because the database designer's choice may be driven by the need to support
+          * multi-occurrences.
+          */
+         final List<Relation> importedKeys = new ArrayList<>();
+         final Map<String, List<Relation>> foreignerKeys = new HashMap<>();
+         try (ResultSet reflect = analyzer.metadata.getImportedKeys(id.catalog, id.schema, id.table)) {
+             if (reflect.next()) do {
+                 Relation relation = new Relation(analyzer, Relation.Direction.IMPORT, reflect);
+                 importedKeys.add(relation);
+                 for (final String column : relation.getForeignerKeys()) {
+                     CollectionsExt.addToMultiValuesMap(foreignerKeys, column, relation);
+                     relation = null;     // Only the first column will be associated.
+                 }
+             } while (!reflect.isClosed());
+         }
+         final List<Relation> exportedKeys = new ArrayList<>();
+         try (ResultSet reflect = analyzer.metadata.getExportedKeys(id.catalog, id.schema, id.table)) {
+             if (reflect.next()) do {
+                 final Relation export = new Relation(analyzer, Relation.Direction.EXPORT, reflect);
+                 if (!export.equals(importedBy)) {
+                     exportedKeys.add(export);
+                 }
+             } while (!reflect.isClosed());
+         }
+         /*
+          * For each column in the table that is not a foreigner key, create an AttributeType of the same name.
+          * The Java type is inferred from the SQL type, and the attribute cardinality in inferred from the SQL
+          * nullability. Attribute names are added in the 'attributeNames' and 'attributeColumns' list. Those
+          * names are usually the same, except when a column is used both as a primary key and as foreigner key.
+          */
+         Class<?> primaryKeyClass   = null;
+         boolean  primaryKeyNonNull = true;
+         boolean  hasGeometry       = false;
+         int startWithLowerCase     = 0;
+         final List<String> attributeNames = new ArrayList<>();
+         final List<String> attributeColumns = new ArrayList<>();
+         final FeatureTypeBuilder feature = new FeatureTypeBuilder(analyzer.nameFactory, analyzer.functions.library, analyzer.locale);
+         try (ResultSet reflect = analyzer.metadata.getColumns(id.catalog, schemaEsc, tableEsc, null)) {
+             while (reflect.next()) {
+                 final String         column       = analyzer.getUniqueString(reflect, Reflection.COLUMN_NAME);
+                 final boolean        mandatory    = Boolean.FALSE.equals(SQLUtilities.parseBoolean(reflect.getString(Reflection.IS_NULLABLE)));
+                 final boolean        isPrimaryKey = primaryKeys.containsKey(column);
+                 final List<Relation> dependencies = foreignerKeys.get(column);
+                 /*
+                  * Heuristic rule for determining if the column names starts with lower case or upper case.
+                  * Words that are all upper-case are ignored on the assumption that they are acronyms.
+                  */
+                 if (!column.isEmpty()) {
+                     final int firstLetter = column.codePointAt(0);
+                     if (Character.isLowerCase(firstLetter)) {
+                         startWithLowerCase++;
+                     } else if (Character.isUpperCase(firstLetter) && !CharSequences.isUpperCase(column)) {
+                         startWithLowerCase--;
+                     }
+                 }
+                 /*
+                  * Add the column as an attribute. Foreign keys are excluded (they will be replaced by associations),
+                  * except if the column is also a primary key. In the latter case we need to keep that column because
+                  * it is needed for building the feature identifier.
+                  */
+                 AttributeTypeBuilder<?> attribute = null;
+                 if (isPrimaryKey || dependencies == null) {
+                     attributeNames.add(column);
+                     attributeColumns.add(column);
+                     final String typeName = reflect.getString(Reflection.TYPE_NAME);
+                     Class<?> type = analyzer.functions.toJavaType(reflect.getInt(Reflection.DATA_TYPE), typeName);
+                     if (type == null) {
+                         analyzer.warning(Resources.Keys.UnknownType_1, typeName);
+                         type = Object.class;
+                     }
+                     attribute = feature.addAttribute(type).setName(column);
+                     if (CharSequence.class.isAssignableFrom(type)) {
+                         final int size = reflect.getInt(Reflection.COLUMN_SIZE);
+                         if (!reflect.wasNull()) {
+                             attribute.setMaximalLength(size);
+                         }
+                     }
+                     if (!mandatory) {
+                         attribute.setMinimumOccurs(0);
+                     }
+                     /*
+                      * Some columns have special purposes: components of primary keys will be used for creating
+                      * identifiers, some columns may contain a geometric object. Adding a role on those columns
+                      * may create synthetic columns, for example "sis:identifier".
+                      */
+                     if (isPrimaryKey) {
+                         attribute.addRole(AttributeRole.IDENTIFIER_COMPONENT);
+                         primaryKeyNonNull &= mandatory;
+                         primaryKeyClass = Classes.findCommonClass(primaryKeyClass, type);
+                         if (primaryKeys.put(column, SQLUtilities.parseBoolean(reflect.getString(Reflection.IS_AUTOINCREMENT))) != null) {
+                             throw new DataStoreContentException(Resources.forLocale(analyzer.locale)
+                                     .getString(Resources.Keys.DuplicatedColumn_1, column));
+                         }
+                     }
+                     if (Geometries.isKnownType(type)) {
+                         final CoordinateReferenceSystem crs = analyzer.functions.createGeometryCRS(reflect);
+                         if (crs != null) {
+                             attribute.setCRS(crs);
+                         }
+                         if (!hasGeometry) {
+                             hasGeometry = true;
+                             attribute.addRole(AttributeRole.DEFAULT_GEOMETRY);
+                         }
+                     }
+                 }
+                 /*
+                  * If the column is a foreigner key, insert an association to another feature instead.
+                  * If the foreigner key uses more than one column, only one of those columns will become
+                  * an association and other columns will be omitted from the FeatureType (but they will
+                  * still be used in SQL queries). Note that columns may be used by more than one relation.
+                  */
+                 if (dependencies != null) {
+                     int count = 0;
+                     for (final Relation dependency : dependencies) {
+                         if (dependency != null) {
+                             final GenericName typeName = dependency.getName(analyzer);
+                             final Table table = analyzer.table(dependency, typeName, id);
+                             /*
+                              * Use the column name as the association name, provided that the foreigner key
+                              * uses only that column. If the foreigner key uses more than one column, then we
+                              * do not know which column better describes the association (often there is none).
+                              * In such case we use the foreigner key name as a fallback.
+                              */
+                             dependency.setPropertyName(column, count++);
+                             final AssociationRoleBuilder association;
+                             if (table != null) {
+                                 dependency.setSearchTable(analyzer, table, table.primaryKeys, Relation.Direction.IMPORT);
+                                 association = feature.addAssociation(table.featureType);
+                             } else {
+                                 association = feature.addAssociation(typeName);     // May happen in case of cyclic dependency.
+                             }
+                             association.setName(dependency.propertyName);
+                             if (!mandatory) {
+                                 association.setMinimumOccurs(0);
+                             }
+                             /*
+                              * If the column is also used in the primary key, then we have a name clash.
+                              * Rename the primary key column with the addition of a "pk:" scope. We rename
+                              * the primary key column instead of this association because the primary key
+                              * column should rarely be used directly.
+                              */
+                             if (attribute != null) {
+                                 attribute.setName(analyzer.nameFactory.createGenericName(null, "pk", column));
+                                 attributeNames.set(attributeNames.size() - 1, attribute.getName().toString());
+                                 attribute = null;
+                             }
+                         }
+                     }
+                 }
+             }
+         }
+         /*
+          * Add the associations created by other tables having foreigner keys to this table.
+          * We infer the column name from the target type. We may have a name clash with other
+          * columns, in which case an arbitrary name change is applied.
+          */
+         int count = 0;
+         for (final Relation dependency : exportedKeys) {
+             if (dependency != null) {
+                 final GenericName typeName = dependency.getName(analyzer);
+                 String propertyName = typeName.tip().toString();
+                 if (startWithLowerCase > 0) {
+                     final CharSequence words = CharSequences.camelCaseToWords(propertyName, true);
+                     final int first = Character.codePointAt(words, 0);
+                     propertyName = new StringBuilder(words.length())
+                             .appendCodePoint(Character.toLowerCase(first))
+                             .append(words, Character.charCount(first), words.length())
+                             .toString();
+                 }
+                 final String base = propertyName;
+                 while (feature.isNameUsed(propertyName)) {
+                     propertyName = base + '-' + ++count;
+                 }
+                 dependency.propertyName = propertyName;
+                 final Table table = analyzer.table(dependency, typeName, null);   // 'null' because exported, not imported.
+                 final AssociationRoleBuilder association;
+                 if (table != null) {
+                     dependency.setSearchTable(analyzer, table, this.primaryKeys, Relation.Direction.EXPORT);
+                     association = feature.addAssociation(table.featureType);
+                 } else {
+                     association = feature.addAssociation(typeName);     // May happen in case of cyclic dependency.
+                 }
+                 association.setName(propertyName)
+                            .setMinimumOccurs(0)
+                            .setMaximumOccurs(Integer.MAX_VALUE);
+             }
+         }
+         /*
+          * If the primary keys use more than one column, we will need an array to store it.
+          * If all columns are non-null numbers, use primitive arrays instead of arrays of wrappers.
+          */
+         if (primaryKeys.size() > 1) {
+             if (primaryKeyNonNull) {
+                 primaryKeyClass = Numbers.wrapperToPrimitive(primaryKeyClass);
+             }
+             primaryKeyClass = Classes.changeArrayDimension(primaryKeyClass, 1);
+         }
+         /*
+          * Global information on the feature type (name, remarks).
+          * The remarks are opportunistically stored in id.freeText if known by the caller.
+          */
+         feature.setName(id.getName(analyzer));
+         String remarks = id.freeText;
+         if (id instanceof Relation) {
+             try (ResultSet reflect = analyzer.metadata.getTables(id.catalog, schemaEsc, tableEsc, null)) {
+                 while (reflect.next()) {
+                     remarks = analyzer.getUniqueString(reflect, Reflection.REMARKS);
+                     if (remarks != null) {
+                         remarks = remarks.trim();
+                         if (remarks.isEmpty()) {
+                             remarks = null;
+                         } else break;
+                     }
+                 }
+             }
+         }
+         if (remarks != null) {
+             feature.setDefinition(remarks);
+         }
+         this.featureType      = feature.build();
+         this.importedKeys     = toArray(importedKeys);
+         this.exportedKeys     = toArray(exportedKeys);
+         this.primaryKeyClass  = primaryKeyClass;
+         this.hasGeometry      = hasGeometry;
+         this.attributeNames   = attributeNames.toArray(new String[attributeNames.size()]);
+         this.attributeColumns = attributeColumns.equals(attributeNames) ? this.attributeNames
+                               : attributeColumns.toArray(new String[attributeColumns.size()]);
+     }
+ 
+     /**
+      * Returns the given relations as an array, or {@code null} if none.
+      */
+     private static Relation[] toArray(final Collection<Relation> relations) {
+         final int size = relations.size();
+         return (size != 0) ? relations.toArray(new Relation[size]) : null;
+     }
+ 
+     /**
+      * Sets the search tables on all {@link Relation} instances for which this operation has been deferred.
+      * This happen when a table could not be obtained because of circular dependency. This method is invoked
+      * after all tables have been created in order to fill such holes.
+      *
+      * @param  tables  all tables created.
+      */
+     final void setDeferredSearchTables(final Analyzer analyzer, final Map<GenericName,Table> tables) throws DataStoreException {
+         for (final Relation.Direction direction : Relation.Direction.values()) {
+             final Relation[] relations;
+             switch (direction) {
+                 case IMPORT: relations = importedKeys; break;
+                 case EXPORT: relations = exportedKeys; break;
+                 default: continue;
+             }
+             if (relations != null) {
+                 for (final Relation relation : relations) {
+                     if (!relation.isSearchTableDefined()) {
+                         // A ClassCastException below would be a bug since 'relation.propertyName' shall be for an association.
 -                        FeatureAssociationRole association = (FeatureAssociationRole) featureType.getProperty(relation.propertyName);
++                        DefaultAssociationRole association = (DefaultAssociationRole) featureType.getProperty(relation.propertyName);
+                         final Table table = tables.get(association.getValueType().getName());
+                         if (table == null) {
+                             throw new InternalDataStoreException(association.toString());
+                         }
+                         final String[] referenced;
+                         switch (direction) {
+                             case IMPORT: referenced = table.primaryKeys; break;
+                             case EXPORT: referenced =  this.primaryKeys; break;
+                             default: throw new AssertionError(direction);
+                         }
+                         relation.setSearchTable(analyzer, table, referenced, direction);
+                     }
+                 }
+             }
+         }
+     }
+ 
+ 
+     // ────────────────────────────────────────────────────────────────────────────────────────
+     //     End of table construction. Next methods are for visualizing the table structure.
+     // ────────────────────────────────────────────────────────────────────────────────────────
+ 
+ 
+     /**
+      * Appends all children to the given parent. The children are added under the given node.
+      * If the children array is null, then this method does nothing.
+      *
+      * @param  parent    the node where to add children.
+      * @param  children  the children to add, or {@code null} if none.
+      * @param  arrow     the symbol to use for relating the columns of two tables in a foreign key.
+      */
+     @Debug
+     private static void appendAll(final TreeTable.Node parent, final Relation[] children, final String arrow) {
+         if (children != null) {
+             for (final Relation child : children) {
+                 child.appendTo(parent, arrow);
+             }
+         }
+     }
+ 
+     /**
+      * Creates a tree representation of this table for debugging purpose.
+      *
+      * @param  parent  the parent node where to add the tree representation.
+      */
+     @Debug
+     final void appendTo(TreeTable.Node parent) {
+         parent = Relation.newChild(parent, featureType.getName().toString());
+         for (final String attribute : attributeNames) {
+             TableReference.newChild(parent, attribute);
+         }
+         appendAll(parent, importedKeys, " → ");
+         appendAll(parent, exportedKeys, " ← ");
+     }
+ 
+     /**
+      * Formats a graphical representation of this table for debugging purpose. This representation
+      * can be printed to the {@linkplain System#out standard output stream} (for example) if the
+      * output device uses a monospaced font and supports Unicode.
+      */
+     @Override
+     public String toString() {
+         return TableReference.toString(this, (n) -> appendTo(n));
+     }
+ 
+ 
+     // ────────────────────────────────────────────────────────────────────────────────────────
+     //     End of table structure visualization. Next methods are for fetching features.
+     // ────────────────────────────────────────────────────────────────────────────────────────
+ 
+ 
+     /**
+      * Returns the feature type inferred from the database structure analysis.
+      */
+     @Override
 -    public final FeatureType getType() {
++    public final DefaultFeatureType getType() {
+         return featureType;
+     }
+ 
+     /**
+      * If this table imports the inverse of the given relation, returns the imported relation.
+      * Otherwise returns {@code null}. This method is used for preventing infinite recursion.
+      *
+      * @param  exported       the relation exported by another table.
+      * @param  exportedOwner  {@code exported.owner.name}: table that contains the {@code exported} relation.
+      * @return the inverse of the given relation, or {@code null} if none.
+      */
+     final Relation getInverseOf(final Relation exported, final TableReference exportedOwner) {
+         if (importedKeys != null && name.equals(exported)) {
+             for (final Relation relation : importedKeys) {
+                 if (relation.equals(exportedOwner) && relation.isInverseOf(exported)) {
+                     return relation;
+                 }
+             }
+         }
+         return null;
+     }
+ 
+     /**
+      * Returns a cache for fetching feature instances by identifier. The map is created when this method is
+      * first invoked. Keys are primary key values, typically as {@code String} or {@code Integer} instances
+      * or arrays of those if the keys use more than one column. Values are usually {@code Feature} instances,
+      * but may also be {@code Collection<Feature>}.
+      */
+     @SuppressWarnings("ReturnOfCollectionOrArrayField")
+     final synchronized WeakValueHashMap<?,Object> instanceForPrimaryKeys() {
+         if (instanceForPrimaryKeys == null) {
+             instanceForPrimaryKeys = new WeakValueHashMap<>(primaryKeyClass);
+         }
+         return instanceForPrimaryKeys;
+     }
+ 
+     /**
+      * Returns the number of rows, or -1 if unknown. Note that some database drivers return 0,
+      * so it is better to consider 0 as "unknown" too. We do not cache this count because it may
+      * change at any time.
+      *
+      * @param  metadata     information about the database.
+      * @param  approximate  whether approximate or outdated values are acceptable.
+      * @return number of rows (may be approximate), or -1 if unknown.
+      */
+     final long countRows(final DatabaseMetaData metadata, final boolean approximate) throws SQLException {
+         long count = -1;
+         final String[] names = TableReference.splitName(featureType.getName());
+         try (ResultSet reflect = metadata.getIndexInfo(names[2], names[1], names[0], false, approximate)) {
+             while (reflect.next()) {
+                 final long n = reflect.getLong(Reflection.CARDINALITY);
+                 if (!reflect.wasNull()) {
+                     if (reflect.getShort(Reflection.TYPE) == DatabaseMetaData.tableIndexStatistic) {
+                         return n;       // "Index statistic" type provides the number of rows in the table.
+                     }
+                     if (n > count) {    // Other index types may be inaccurate.
+                         count = n;
+                     }
+                 }
+             }
+         }
+         return count;
+     }
+ 
+     /**
+      * Returns a stream of all features contained in this dataset.
+      *
+      * @param  parallel  {@code true} for a parallel stream (if supported), or {@code false} for a sequential stream.
+      * @return all features contained in this dataset.
+      * @throws DataStoreException if an error occurred while creating the stream.
+      */
+     @Override
 -    public Stream<Feature> features(final boolean parallel) throws DataStoreException {
++    public Stream<AbstractFeature> features(final boolean parallel) throws DataStoreException {
+         DataStoreException ex;
+         Connection connection = null;
+         try {
+             connection = source.getConnection();
+             final Features iter = features(connection, new ArrayList<>(), null);
+             return StreamSupport.stream(iter, parallel).onClose(iter);
+         } catch (SQLException cause) {
+             ex = new DataStoreException(Exceptions.unwrap(cause));
+         }
+         if (connection != null) try {
+             connection.close();
+         } catch (SQLException e) {
+             ex.addSuppressed(e);
+         }
+         throw ex;
+     }
+ 
+     /**
+      * Returns an iterator over the features.
+      *
+      * @param connection  connection to the database.
+      * @param following   the relations that we are following. Used for avoiding never ending loop.
+      * @param noFollow    relation to not follow, or {@code null} if none.
+      */
+     final Features features(final Connection connection, final List<Relation> following, final Relation noFollow)
+             throws SQLException, InternalDataStoreException
+     {
+         return new Features(this, connection, attributeNames, attributeColumns, importedKeys, exportedKeys, following, noFollow);
+     }
+ }
diff --cc storage/sis-sqlstore/src/test/java/org/apache/sis/storage/sql/SQLStoreTest.java
index 0000000,7269f20..4d6f526
mode 000000,100644..100644
--- a/storage/sis-sqlstore/src/test/java/org/apache/sis/storage/sql/SQLStoreTest.java
+++ b/storage/sis-sqlstore/src/test/java/org/apache/sis/storage/sql/SQLStoreTest.java
@@@ -1,0 -1,270 +1,270 @@@
+ /*
+  * Licensed to the Apache Software Foundation (ASF) under one or more
+  * contributor license agreements.  See the NOTICE file distributed with
+  * this work for additional information regarding copyright ownership.
+  * The ASF licenses this file to You under the Apache License, Version 2.0
+  * (the "License"); you may not use this file except in compliance with
+  * the License.  You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+ package org.apache.sis.storage.sql;
+ 
+ import java.util.Map;
+ import java.util.HashMap;
+ import java.util.HashSet;
+ import java.util.Arrays;
+ import java.util.Collection;
+ import java.util.stream.Stream;
+ import org.apache.sis.storage.FeatureSet;
+ import org.apache.sis.storage.StorageConnector;
+ import org.apache.sis.test.sql.TestDatabase;
+ import org.apache.sis.test.TestCase;
+ import org.junit.Test;
+ 
+ import static org.apache.sis.test.Assert.*;
+ 
+ // Branch-dependent imports
 -import org.opengis.feature.Feature;
 -import org.opengis.feature.FeatureType;
 -import org.opengis.feature.PropertyType;
 -import org.opengis.feature.AttributeType;
 -import org.opengis.feature.FeatureAssociationRole;
++import org.apache.sis.feature.AbstractFeature;
++import org.apache.sis.feature.DefaultFeatureType;
++import org.apache.sis.feature.AbstractIdentifiedType;
++import org.apache.sis.feature.DefaultAttributeType;
++import org.apache.sis.feature.DefaultAssociationRole;
+ 
+ 
+ /**
+  * Tests {@link SQLStore}.
+  *
+  * @author  Martin Desruisseaux (Geomatys)
+  * @version 1.0
+  * @since   1.0
+  * @module
+  */
+ public final strictfp class SQLStoreTest extends TestCase {
+     /**
+      * The schema where the features to test will be stored.
+      */
+     private static final String SCHEMA = "features";
+ 
+     /**
+      * Number of times that each country has been seen while iterating over the cities.
+      */
+     private final Map<String,Integer> countryCount = new HashMap<>();
+ 
+     /**
+      * The {@code Country} value for Canada, or {@code null} if not yet visited.
+      * This feature should appear twice, and all those occurrences should use the exact same instance.
+      * We use that for verifying the {@code Table.instanceForPrimaryKeys} caching.
+      */
 -    private Feature canada;
++    private AbstractFeature canada;
+ 
+     /**
+      * Tests on Derby.
+      *
+      * @throws Exception if an error occurred while testing the database.
+      */
+     @Test
+     public void testOnDerby() throws Exception {
+         test(TestDatabase.create("SQLStore"), true);
+     }
+ 
+     /**
+      * Tests on HSQLDB.
+      *
+      * @throws Exception if an error occurred while testing the database.
+      */
+     @Test
+     public void testOnHSQLDB() throws Exception {
+         test(TestDatabase.createOnHSQLDB("SQLStore", true), true);
+     }
+ 
+     /**
+      * Tests on PostgreSQL.
+      *
+      * @throws Exception if an error occurred while testing the database.
+      */
+     @Test
+     public void testOnPostgreSQL() throws Exception {
+         test(TestDatabase.createOnPostgreSQL(SCHEMA, true), false);
+     }
+ 
+     /**
+      * Tests reading an existing schema. The schema is created and populated by the {@code Features.sql} script.
+      *
+      * @param  inMemory  whether the test database is in memory. If {@code true}, then the database is presumed
+      *                   initially empty: a schema will be created, and we assume that there is no ambiguity
+      *                   if we don't specify the schema in {@link SQLStore} constructor.
+      */
+     private void test(final TestDatabase database, final boolean inMemory) throws Exception {
+         final String[] scripts = {
+             "CREATE SCHEMA " + SCHEMA + ';',
+             "file:Features.sql"
+         };
+         if (!inMemory) {
+             scripts[0] = null;      // Erase the "CREATE SCHEMA" statement if the schema already exists.
+         }
+         try (TestDatabase tmp = database) {
+             tmp.executeSQL(SQLStoreTest.class, scripts);
+             try (SQLStore store = new SQLStore(new SQLStoreProvider(), new StorageConnector(tmp.source),
+                     SQLStoreProvider.createTableName(null, inMemory ? null : SCHEMA, "Cities")))
+             {
+                 final FeatureSet cities = (FeatureSet) store.findResource("Cities");
+                 verifyFeatureType(cities.getType(),
+                         new String[] {"sis:identifier", "pk:country", "country",   "native_name", "english_name", "population",  "parks"},
+                         new Object[] {null,             String.class, "Countries", String.class,  String.class,   Integer.class, "Parks"});
+ 
+                 verifyFeatureType(((FeatureSet) store.findResource("Countries")).getType(),
+                         new String[] {"sis:identifier", "code",       "native_name"},
+                         new Object[] {null,             String.class, String.class});
+ 
+                 verifyFeatureType(((FeatureSet) store.findResource("Parks")).getType(),
+                         new String[] {"sis:identifier", "pk:country", "FK_City", "city",       "native_name", "english_name"},
+                         new Object[] {null,             String.class, "Cities",  String.class, String.class,  String.class});
+ 
 -                try (Stream<Feature> features = cities.features(false)) {
++                try (Stream<AbstractFeature> features = cities.features(false)) {
+                     features.forEach((f) -> verifyContent(f));
+                 }
+             }
+         }
+         assertEquals(Integer.valueOf(2), countryCount.remove("CAN"));
+         assertEquals(Integer.valueOf(1), countryCount.remove("FRA"));
+         assertEquals(Integer.valueOf(1), countryCount.remove("JPN"));
+         assertTrue  (countryCount.isEmpty());
+     }
+ 
+     /**
+      * Verifies the result of analyzing the structure of the {@code "Cities"} table.
+      */
 -    private static void verifyFeatureType(final FeatureType type, final String[] expectedNames, final Object[] expectedTypes) {
++    private static void verifyFeatureType(final DefaultFeatureType type, final String[] expectedNames, final Object[] expectedTypes) {
+         int i = 0;
 -        for (PropertyType pt : type.getProperties(false)) {
++        for (AbstractIdentifiedType pt : type.getProperties(false)) {
+             assertEquals("name", expectedNames[i], pt.getName().toString());
+             final Object expectedType = expectedTypes[i];
+             if (expectedType != null) {
+                 final String label;
+                 final Object value;
+                 if (expectedType instanceof Class<?>) {
+                     label = "attribute type";
 -                    value = ((AttributeType<?>) pt).getValueClass();
++                    value = ((DefaultAttributeType<?>) pt).getValueClass();
+                 } else {
+                     label = "association type";
 -                    value = ((FeatureAssociationRole) pt).getValueType().getName().toString();
++                    value = ((DefaultAssociationRole) pt).getValueType().getName().toString();
+                 }
+                 assertEquals(label, expectedType, value);
+             }
+             i++;
+         }
+         assertEquals("count", expectedNames.length, i);
+     }
+ 
+     /**
+      * Verifies the content of the {@code Cities} table.
+      * The features are in no particular order.
+      */
 -    private void verifyContent(final Feature feature) {
++    private void verifyContent(final AbstractFeature feature) {
+         final String city = feature.getPropertyValue("native_name").toString();
+         final String country, countryName, englishName;
+         final String[] parks;
+         final int population;
+         boolean isCanada = false;
+         switch (city) {
+             case "東京": {
+                 englishName = "Tōkyō";
+                 country     = "JPN";
+                 countryName = "日本";
+                 population  = 13622267;         // In 2016.
+                 parks       = new String[] {"Yoyogi-kōen", "Shinjuku Gyoen"};
+                 break;
+             }
+             case "Paris": {
+                 englishName = "Paris";
+                 country     = "FRA";
+                 countryName = "France";
+                 population  = 2206488;          // In 2017.
+                 parks       = new String[] {"Tuileries Garden", "Luxembourg Garden"};
+                 break;
+             }
+             case "Montréal": {
+                 englishName = "Montreal";
+                 country     = "CAN";
+                 countryName = "Canada";
+                 population  = 1704694;          // In 2016.
+                 isCanada    = true;
+                 parks       = new String[] {"Mount Royal"};
+                 break;
+             }
+             case "Québec": {
+                 englishName = "Quebec";
+                 country     = "CAN";
+                 countryName = "Canada";
+                 population  = 531902;           // In 2016.
+                 isCanada    = true;
+                 parks = new String[] {};
+                 break;
+             }
+             default: {
+                 fail("Unexpected feature: " + city);
+                 return;
+             }
+         }
+         /*
+          * Verify attributes. They are the easiest properties to read.
+          */
+         assertEquals("pk:country",     country,              feature.getPropertyValue("pk:country"));
+         assertEquals("sis:identifier", country + ':' + city, feature.getPropertyValue("sis:identifier"));
+         assertEquals("english_name",   englishName,          feature.getPropertyValue("english_name"));
+         assertEquals("population",     population,           feature.getPropertyValue("population"));
+         /*
+          * Associations using Relation.Direction.IMPORT.
+          * Those associations should be cached; we verify with "Canada" case.
+          */
+         assertEquals("country", countryName, getIndirectPropertyValue(feature, "country", "native_name"));
+         if (isCanada) {
 -            final Feature f = (Feature) feature.getPropertyValue("country");
++            final AbstractFeature f = (AbstractFeature) feature.getPropertyValue("country");
+             if (canada == null) {
+                 canada = f;
+             } else {
+                 assertSame(canada, f);              // Want exact same feature instance, not just equal.
+             }
+         }
+         countryCount.merge(country, 1, (o, n) -> n+1);
+         /*
+          * Associations using Relation.Direction.EXPORT.
+          * Contrarily to the IMPORT case, those associations can contain many values.
+          */
+         final Collection<?> actualParks = (Collection<?>) feature.getPropertyValue("parks");
+         assertNotNull("parks", actualParks);
+         assertEquals("parks.length", parks.length, actualParks.size());
+         final Collection<String> expectedParks = new HashSet<>(Arrays.asList(parks));
+         for (final Object park : actualParks) {
 -            final Feature pf = (Feature) park;
++            final AbstractFeature pf = (AbstractFeature) park;
+             final String npn = (String) pf.getPropertyValue("native_name");
+             final String epn = (String) pf.getPropertyValue("english_name");
+             assertNotNull("park.native_name",  npn);
+             assertNotNull("park.english_name", epn);
+             assertNotEquals("park.names", npn, epn);
+             assertTrue("park.english_name", expectedParks.remove(epn));
+             /*
+              * Verify the reverse association from Parks to Cities.
+              * This creates a cyclic graph, but SQLStore is capable of handling it.
+              */
+             assertSame("City → Park → City", feature, pf.getPropertyValue("FK_City"));
+         }
+     }
+ 
+     /**
+      * Follows an association in the given feature.
+      */
 -    private static Object getIndirectPropertyValue(final Feature feature, final String p1, final String p2) {
++    private static Object getIndirectPropertyValue(final AbstractFeature feature, final String p1, final String p2) {
+         final Object dependency = feature.getPropertyValue(p1);
+         assertNotNull(p1, dependency);
 -        assertInstanceOf(p1, Feature.class, dependency);
 -        return ((Feature) dependency).getPropertyValue(p2);
++        assertInstanceOf(p1, AbstractFeature.class, dependency);
++        return ((AbstractFeature) dependency).getPropertyValue(p2);
+     }
+ }
diff --cc storage/sis-storage/src/main/java/org/apache/sis/internal/storage/MetadataBuilder.java
index db5580f,22f33a9..935d0bd
--- a/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/MetadataBuilder.java
+++ b/storage/sis-storage/src/main/java/org/apache/sis/internal/storage/MetadataBuilder.java
@@@ -940,11 -959,8 +959,13 @@@ public class MetadataBuilder 
          if (abbreviation != null && abbreviation.length() != 0) {
              if (format == null) {
                  format = MetadataSource.getProvided().lookup(Format.class, abbreviation);
 +                /*
 +                 * Additional step for converting deprecated "name" and "specification" into non-deprecated properties.
 +                 * This step is not required on SIS branches that depend on development branches of GeoAPI 3.1 or 4.0.
 +                 */
 +                format = DefaultFormat.castOrCopy(format);
+             } else {
+                 addFormatName(abbreviation);
              }
          }
      }


Mime
View raw message