This is an automated email from the ASF dual-hosted git repository.

desruisseaux pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/sis.git
commit 98bc0dfa207c7db3e226b9d99d34a33acd1dacdf
Merge: 7a734d927a b6af5d3997
Author: Martin Desruisseaux <martin.desruisse...@geomatys.com>
AuthorDate: Mon Dec 4 16:49:28 2023 +0100

    Merge branch 'geoapi-3.1'.
    Includes partial work on Shapefile writer and fixes in CLI and JavaFX application starters.

 .../main/org/apache/sis/console/AboutCommand.java | 14 +-
 .../main/org/apache/sis/console/CRSCommand.java | 11 +-
 .../main/org/apache/sis/console/Command.java | 185 +++--
 .../main/org/apache/sis/console/CommandRunner.java | 117 +++-
 .../org/apache/sis/console/Commands.properties | 3 +-
 .../org/apache/sis/console/Commands_fr.properties | 3 +-
 .../apache/sis/console/FormattedOutputCommand.java | 35 +-
 .../main/org/apache/sis/console/HelpCommand.java | 3 +-
 .../org/apache/sis/console/IdentifierCommand.java | 14 +-
 .../main/org/apache/sis/console/InfoCommand.java | 151 +++++
 .../org/apache/sis/console/MetadataCommand.java | 12 +-
 .../org/apache/sis/console/MimeTypeCommand.java | 27 +-
 .../main/org/apache/sis/console/Option.java | 14 +-
 .../apache/sis/console/ResourcesDownloader.java | 44 +-
 .../main/org/apache/sis/console/SIS.java | 754 +++++++++++++++++++++
 .../org/apache/sis/console/TransformCommand.java | 61 +-
 .../org/apache/sis/console/TranslateCommand.java | 13 +-
 .../org/apache/sis/console/AboutCommandTest.java | 20 +-
 .../org/apache/sis/console/CRSCommandTest.java | 20 +-
 .../org/apache/sis/console/CommandRunnerTest.java | 55 +-
 .../org/apache/sis/console/HelpCommandTest.java | 54 +-
 .../apache/sis/console/MetadataCommandTest.java | 14 +-
 .../apache/sis/console/MimeTypeCommandTest.java | 14 +-
 .../sis/storage/geotiff/ImageFileDirectory.java | 22 +-
 .../org/apache/sis/storage/gpx/StoreProvider.java | 2 +-
 .../org.apache.sis.storage/main/module-info.java | 1 +
 .../main/org/apache/sis/io/stream/IOUtilities.java | 22 +-
 .../src/org.apache.sis.util/main/module-info.java | 3 +-
 .../main/org/apache/sis/io/LineAppender.java | 59 +-
 .../main/org/apache/sis/io/package-info.java | 2 +-
 .../main/org/apache/sis/pending/jdk/JDK17.java | 26 +-
 .../main/org/apache/sis/setup/About.java | 118 ++--
 .../org/apache/sis/util/collection/TreeTables.java | 26 +-
 .../org/apache/sis/util/logging/Initializer.java | 70 +-
 .../apache/sis/util/logging/MonolineFormatter.java | 83 ++-
 .../org/apache/sis/util/resources/Vocabulary.java | 10 +-
 .../sis/util/resources/Vocabulary.properties | 2 +-
 .../sis/util/resources/Vocabulary_fr.properties | 2 +-
 .../sis/storage/shapefile/ShapefileStore.java | 433 +++++++++---
 .../shapefile/shp/ShapeGeometryEncoder.java | 241 ++++++-
 .../sis/storage/shapefile/shp/ShapeRecord.java | 4 +
 .../sis/storage/shapefile/shp/ShapeWriter.java | 14 +-
 .../sis/storage/shapefile/ShapefileStoreTest.java | 122 +++-
 .../test/module-info.java | 3 +
 optional/src/org.apache.sis.gui/bundle/bin/sis | 4 +
 .../bundle/bin/{sis => sis_shell} | 19 +-
 .../bundle/bin/{sisfx.bat => sis_shell.bat} | 22 +-
 optional/src/org.apache.sis.gui/bundle/bin/sisfx | 44 +-
 .../src/org.apache.sis.gui/bundle/bin/sisfx.bat | 2 +-
 .../src/org.apache.sis.gui/bundle/conf/imports.jsh | 299 ++++++++
 .../bundle/conf/logging.properties | 6 +-
 .../main/org/apache/sis/gui/setup/Wizard.java | 4 +-
 52 files changed, 2732 insertions(+), 571 deletions(-)

diff --cc endorsed/src/org.apache.sis.console/main/org/apache/sis/console/IdentifierCommand.java
index f36bd9ac8b,60ad797a9a..4255135e89
--- a/endorsed/src/org.apache.sis.console/main/org/apache/sis/console/IdentifierCommand.java
+++ b/endorsed/src/org.apache.sis.console/main/org/apache/sis/console/IdentifierCommand.java
@@@ -125,12 -126,17 +130,17 @@@ final class IdentifierCommand extends F
          }
          if (metadata != null) {
              final List<Row> rows;
-             if (metadata instanceof Metadata) {
+             if (metadata instanceof DefaultMetadata) {
                  rows = new ArrayList<>();
-                 final Identifier id = ((Metadata) metadata).getMetadataIdentifier();
-                 if (id != null) {
-                     CharSequence desc = id.getDescription();
+                 final Identifier id = ((DefaultMetadata) metadata).getMetadataIdentifier();
+                 if (id instanceof DefaultIdentifier) {
+                     CharSequence desc = ((DefaultIdentifier) id).getDescription();
-                     if (desc != null && !files.isEmpty()) desc = files.get(0);
+                     if (desc == null && !files.isEmpty()) {
+                         final Object c = files.get(0);
+                         if (c instanceof CharSequence) {
+                             desc = c.toString();
+                         }
+                     }
                      rows.add(new Row(State.VALID, IdentifiedObjects.toString(id), desc));
                  }
                  for (final ReferenceSystem rs : ((Metadata) metadata).getReferenceSystemInfo()) {
diff --cc endorsed/src/org.apache.sis.console/main/org/apache/sis/console/TransformCommand.java
index ad996db89f,ea45f0ee88..af72ada778
--- a/endorsed/src/org.apache.sis.console/main/org/apache/sis/console/TransformCommand.java
+++ b/endorsed/src/org.apache.sis.console/main/org/apache/sis/console/TransformCommand.java
@@@ -74,10 -77,10 +77,11 @@@ import org.apache.sis.metadata.iso.exte
  import org.apache.sis.util.resources.Vocabulary;
  import org.apache.sis.util.resources.Errors;
  import org.apache.sis.util.logging.Logging;
+ import org.apache.sis.setup.OptionKey;
-// Specific to the geoapi-3.1 and geoapi-4.0 branches:
-import org.opengis.referencing.ObjectDomain;
+// Specific to the main branch:
+import org.apache.sis.referencing.DefaultObjectDomain;
+import org.apache.sis.referencing.internal.Legacy;
  /**
diff --cc endorsed/src/org.apache.sis.console/test/org/apache/sis/console/MetadataCommandTest.java
index 571d2d767e,850072701a..176d5cce0e
--- a/endorsed/src/org.apache.sis.console/test/org/apache/sis/console/MetadataCommandTest.java
+++ b/endorsed/src/org.apache.sis.console/test/org/apache/sis/console/MetadataCommandTest.java
@@@ -48,10 -48,9 +48,10 @@@ public final class MetadataCommandTest
       * @throws Exception if an error occurred while creating the command.
       */
      @Test
+     @Ignore("Requires GeoAPI 3.1")
      public void testNetCDF() throws Exception {
-         final URL url = TestData.NETCDF_2D_GEOGRAPHIC.location();
+         final URL url = new URL("Cube2D_geographic_packed.nc");   // TestData.NETCDF_2D_GEOGRAPHIC.location();
-         final MetadataCommand test = new MetadataCommand(0, CommandRunner.TEST, url.toString());
+         var test = new MetadataCommand(0, new String[] {CommandRunner.TEST, url.toString()});
          test.run();
          verifyNetCDF("Metadata", test.outputBuffer.toString());
      }
@@@ -73,11 -72,10 +73,11 @@@
       * @throws Exception if an error occurred while creating the command.
       */
      @Test
+     @Ignore("Requires GeoAPI 3.1")
      @DependsOnMethod("testNetCDF")
      public void testFormatXML() throws Exception {
-         final URL url = TestData.NETCDF_2D_GEOGRAPHIC.location();
+         final URL url = new URL("Cube2D_geographic_packed.nc") ;   // TestData.NETCDF_2D_GEOGRAPHIC.location();
-         final MetadataCommand test = new MetadataCommand(0, CommandRunner.TEST, url.toString(), "--format", "XML");
+         var test = new MetadataCommand(0, new String[] {CommandRunner.TEST, url.toString(), "--format", "XML"});
          test.run();
          verifyNetCDF("<?xml", test.outputBuffer.toString());
      }
diff --cc incubator/src/org.apache.sis.storage.shapefile/main/org/apache/sis/storage/shapefile/ShapefileStore.java
index 928948f815,2d6fe5e827..a08f310fb5
--- a/incubator/src/org.apache.sis.storage.shapefile/main/org/apache/sis/storage/shapefile/ShapefileStore.java
+++ b/incubator/src/org.apache.sis.storage.shapefile/main/org/apache/sis/storage/shapefile/ShapefileStore.java
@@@ -588,58 -592,71 +593,71 @@@ public final class ShapefileStore exten
          }
          @Override
-         public void updateType(DefaultFeatureType newType) throws DataStoreException {
-             if (true) throw new UnsupportedOperationException("Not supported yet.");
-         public synchronized void updateType(FeatureType newType) throws DataStoreException {
++        public synchronized void updateType(DefaultFeatureType newType) throws DataStoreException {
              if (!isDefaultView()) throw new DataStoreException("Resource not writable in current filter state");
              if (Files.exists(shpPath)) {
                  throw new DataStoreException("Update type is possible only when files do not exist. It can be used to create a new shapefile but not to update one.");
              }
-             final ShapeHeader shpHeader = new ShapeHeader();
-             final DBFHeader dbfHeader = new DBFHeader();
-             final Charset charset = userDefinedCharSet == null ? StandardCharsets.UTF_8 : userDefinedCharSet;
-             CoordinateReferenceSystem crs = CommonCRS.WGS84.normalizedGeographic();
-
-             for (AbstractIdentifiedType pt : newType.getProperties(true)) {
-                 if (pt instanceof DefaultAttributeType) {
-                     final DefaultAttributeType at = (DefaultAttributeType) pt;
-                     final Class valueClass = at.getValueClass();
-
-                     Integer length = AttributeConvention.getMaximalLengthCharacteristic(newType, pt);
-                     if (length == 0) length = 255;
+             lock.writeLock().lock();
+             try {
+                 final ShapeHeader shpHeader = new ShapeHeader();
+                 shpHeader.bbox = new ImmutableEnvelope(new GeneralEnvelope(4));
+                 final DBFHeader dbfHeader = new DBFHeader();
+                 dbfHeader.fields = new DBFField[0];
+                 final Charset charset = userDefinedCharSet == null ? StandardCharsets.UTF_8 : userDefinedCharSet;
+                 CoordinateReferenceSystem crs = CommonCRS.WGS84.normalizedGeographic();
+
-             for (PropertyType pt : newType.getProperties(true)) {
-                 if (pt instanceof AttributeType) {
-                     final AttributeType at = (AttributeType) pt;
++                for (AbstractIdentifiedType pt : newType.getProperties(true)) {
++                    if (pt instanceof DefaultAttributeType) {
++                        final DefaultAttributeType at = (DefaultAttributeType) pt;
+                         final Class valueClass = at.getValueClass();
+                         final String attName = at.getName().tip().toString();
+
+                         Integer length = AttributeConvention.getMaximalLengthCharacteristic(newType, pt);
+                         if (length == null || length == 0) length = 255;
+
+                         if (Geometry.class.isAssignableFrom(valueClass)) {
+                             if (shpHeader.shapeType != 0) {
+                                 throw new DataStoreException("Shapefile format can only contain one geometry");
+                             }
+                             if (Point.class.isAssignableFrom(valueClass)) shpHeader.shapeType = ShapeType.VALUE_POINT;
+                             else if (MultiPoint.class.isAssignableFrom(valueClass))
+                                 shpHeader.shapeType = ShapeType.VALUE_MULTIPOINT;
+                             else if (LineString.class.isAssignableFrom(valueClass) || MultiLineString.class.isAssignableFrom(valueClass))
+                                 shpHeader.shapeType = ShapeType.VALUE_POLYLINE;
+                             else if (Polygon.class.isAssignableFrom(valueClass) || MultiPolygon.class.isAssignableFrom(valueClass))
+                                 shpHeader.shapeType = ShapeType.VALUE_POLYGON;
+                             else throw new DataStoreException("Unsupported geometry type " + valueClass);
+
+                             Object cdt = at.characteristics().get(AttributeConvention.CRS);
-                             if (cdt instanceof AttributeType) {
-                                 Object defaultValue = ((AttributeType) cdt).getDefaultValue();
++                            if (cdt instanceof DefaultAttributeType) {
++                                Object defaultValue = ((DefaultAttributeType) cdt).getDefaultValue();
+                                 if (defaultValue instanceof CoordinateReferenceSystem) {
+                                     crs = (CoordinateReferenceSystem) defaultValue;
+                                 }
+                             }
-                     if (Geometry.class.isAssignableFrom(valueClass)) {
-                         if (shpHeader.shapeType != 0) {
-                             throw new DataStoreException("Shapefile format can only contain one geometry");
-                         }
-                         if (Point.class.isAssignableFrom(valueClass)) shpHeader.shapeType = ShapeType.VALUE_POINT;
-                         else if (MultiPoint.class.isAssignableFrom(valueClass)) shpHeader.shapeType = ShapeType.VALUE_MULTIPOINT;
-                         else if (LineString.class.isAssignableFrom(valueClass) || MultiLineString.class.isAssignableFrom(valueClass)) shpHeader.shapeType = ShapeType.VALUE_POLYLINE;
-                         else if (Polygon.class.isAssignableFrom(valueClass) || MultiPolygon.class.isAssignableFrom(valueClass)) shpHeader.shapeType = ShapeType.VALUE_POLYGON;
-                         else throw new DataStoreException("Unsupported geometry type " + valueClass);
-
-                         Object cdt = at.characteristics().get(AttributeConvention.CRS_CHARACTERISTIC);
-                         if (cdt instanceof CoordinateReferenceSystem) {
-                             crs = (CoordinateReferenceSystem) cdt;
+                         } else if (String.class.isAssignableFrom(valueClass)) {
+                             dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(attName, (char) DBFField.TYPE_CHAR, 0, length, 0, charset));
+                         } else if (Byte.class.isAssignableFrom(valueClass)) {
+                             dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(attName, (char) DBFField.TYPE_NUMBER, 0, 4, 0, null));
+                         } else if (Short.class.isAssignableFrom(valueClass)) {
+                             dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(attName, (char) DBFField.TYPE_NUMBER, 0, 6, 0, null));
+                         } else if (Integer.class.isAssignableFrom(valueClass)) {
+                             dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(attName, (char) DBFField.TYPE_NUMBER, 0, 9, 0, null));
+                         } else if (Long.class.isAssignableFrom(valueClass)) {
+                             dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(attName, (char) DBFField.TYPE_NUMBER, 0, 19, 0, null));
+                         } else if (Float.class.isAssignableFrom(valueClass)) {
+                             dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(attName, (char) DBFField.TYPE_NUMBER, 0, 11, 8, null));
+                         } else if (Double.class.isAssignableFrom(valueClass)) {
+                             dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(attName, (char) DBFField.TYPE_NUMBER, 0, 33, 30, null));
+                         } else if (LocalDate.class.isAssignableFrom(valueClass)) {
+                             dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(attName, (char) DBFField.TYPE_DATE, 0, 20, 0, null));
+                         } else {
+                             LOGGER.log(Level.WARNING, "Shapefile writing, field {0} is not supported", pt.getName());
                          }
-
-                     } else if (String.class.isAssignableFrom(valueClass)) {
-                         dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(idField, (char)DBFField.TYPE_CHAR, 0, length, 0, charset));
-                     } else if (Byte.class.isAssignableFrom(valueClass)) {
-                         dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(idField, (char)DBFField.TYPE_NUMBER, 0, 4, 0, null));
-                     } else if (Short.class.isAssignableFrom(valueClass)) {
-                         dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(idField, (char)DBFField.TYPE_NUMBER, 0, 6, 0, null));
-                     } else if (Integer.class.isAssignableFrom(valueClass)) {
-                         dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(idField, (char)DBFField.TYPE_NUMBER, 0, 9, 0, null));
-                     } else if (Long.class.isAssignableFrom(valueClass)) {
-                         dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(idField, (char)DBFField.TYPE_NUMBER, 0, 19, 0, null));
-                     } else if (Float.class.isAssignableFrom(valueClass)) {
-                         dbfHeader.fields = ArraysExt.append(dbfHeader.fields, new DBFField(idField, (char)DBFField.TYPE_NUMBER, 0, 33, 0, null));
-                     } else if (Double.class.isAssignableFrom(valueClass)) {
-
-                     } else if (LocalDate.class.isAssignableFrom(valueClass)) {
-                     } else {
                          LOGGER.log(Level.WARNING, "Shapefile writing, field {0} is not supported", pt.getName());
                      }
@@@ -683,21 -709,88 +710,88 @@@
          }
          @Override
-         public void add(Iterator<? extends Feature> features) throws DataStoreException {
+         public void add(Iterator<? extends AbstractFeature> features) throws DataStoreException {
              if (!isDefaultView()) throw new DataStoreException("Resource not writable in current filter state");
-             throw new UnsupportedOperationException("Not supported yet.");
+             if (!Files.exists(shpPath)) throw new DataStoreException("FeatureType do not exist, use updateType before modifying features.");
+
+             final Writer writer = new Writer(charset);
+             try {
+                 //write existing features
-                 try (Stream<Feature> stream = features(false)) {
-                     Iterator<Feature> iterator = stream.iterator();
++                try (Stream<AbstractFeature> stream = features(false)) {
++                    Iterator<AbstractFeature> iterator = stream.iterator();
+                     while (iterator.hasNext()) {
+                         writer.write(iterator.next());
+                     }
+                 }
+
+                 //write new features
+                 while (features.hasNext()) {
+                     writer.write(features.next());
+                 }
+
+                 writer.finish(true);
+             } catch (IOException ex) {
+                 try {
+                     writer.finish(false);
+                 } catch (IOException e) {
+                     ex.addSuppressed(e);
+                 }
+                 throw new DataStoreException("Writing failed", ex);
+             }
          }
          @Override
-         public void removeIf(Predicate<? super Feature> filter) throws DataStoreException {
+         public void removeIf(Predicate<? super AbstractFeature> filter) throws DataStoreException {
              if (!isDefaultView()) throw new DataStoreException("Resource not writable in current filter state");
-             throw new UnsupportedOperationException("Not supported yet.");
+             if (!Files.exists(shpPath)) throw new DataStoreException("FeatureType do not exist, use updateType before modifying features.");
+
+             final Writer writer = new Writer(charset);
+             try {
+                 //write existing features not matching filter
-                 try (Stream<Feature> stream = features(false)) {
-                     Iterator<Feature> iterator = stream.filter(filter.negate()).iterator();
++                try (Stream<AbstractFeature> stream = features(false)) {
++                    Iterator<AbstractFeature> iterator = stream.filter(filter.negate()).iterator();
+                     while (iterator.hasNext()) {
+                         writer.write(iterator.next());
+                     }
+                 }
+                 writer.finish(true);
+             } catch (IOException ex) {
+                 try {
+                     writer.finish(false);
+                 } catch (IOException e) {
+                     ex.addSuppressed(e);
+                 }
+                 throw new DataStoreException("Writing failed", ex);
+             }
          }
          @Override
-         public void replaceIf(Predicate<? super Feature> filter, UnaryOperator<Feature> updater) throws DataStoreException {
+         public void replaceIf(Predicate<? super AbstractFeature> filter, UnaryOperator<AbstractFeature> updater) throws DataStoreException {
              if (!isDefaultView()) throw new DataStoreException("Resource not writable in current filter state");
-             throw new UnsupportedOperationException("Not supported yet.");
+             if (!Files.exists(shpPath)) throw new DataStoreException("FeatureType do not exist, use updateType before modifying features.");
+
+             final Writer writer = new Writer(charset);
+             try {
+                 //write existing features applying modifications
-                 try (Stream<Feature> stream = features(false)) {
-                     Iterator<Feature> iterator = stream.iterator();
++                try (Stream<AbstractFeature> stream = features(false)) {
++                    Iterator<AbstractFeature> iterator = stream.iterator();
+                     while (iterator.hasNext()) {
-                         Feature feature = iterator.next();
++                        AbstractFeature feature = iterator.next();
+                         if (filter.test(feature)) {
+                             feature = updater.apply(feature);
+                         }
+                         if (feature != null) writer.write(feature);
+                     }
+                 }
+                 writer.finish(true);
+             } catch (IOException ex) {
+                 try {
+                     writer.finish(false);
+                 } catch (IOException e) {
+                     ex.addSuppressed(e);
+                 }
+                 throw new DataStoreException("Writing failed", ex);
+             }
          }
          @Override
@@@ -897,5 -1042,124 +1043,124 @@@
              return env;
          }
+     private class Writer {
+
+         private final ShpFiles tempFiles;
+         private final ShapeWriter shpWriter;
+         private final DBFWriter dbfWriter;
+         private final IndexWriter shxWriter;
+         private final ShapeHeader shpHeader;
+         private final DBFHeader dbfHeader;
+         private String defaultGeomName = null;
+         private int inc = 0;
+
+         private Writer(Charset charset) throws DataStoreException{
+             try {
+                 tempFiles = files.createTempFiles();
+             } catch (IOException ex) {
+                 throw new DataStoreException("Failed to create temp files", ex);
+             }
+
+             try {
+                 //get original headers and information
+                 try (ShapeReader reader = new ShapeReader(ShpFiles.openReadChannel(files.shpFile), null)) {
+                     shpHeader = new ShapeHeader(reader.getHeader());
+                 }
+                 try (DBFReader reader = new DBFReader(ShpFiles.openReadChannel(files.dbfFile), charset, null)) {
+                     dbfHeader = new DBFHeader(reader.getHeader());
+                 }
+
+                 //unchanged files
+                 ShpFiles.replace(files.cpgFile, tempFiles.getCpg(true));
+                 ShpFiles.replace(files.prjFile, tempFiles.getPrj(true));
+
+                 //start new files
+
+                 //write shapefile
+                 shpWriter = new ShapeWriter(ShpFiles.openWriteChannel(tempFiles.shpFile, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING));
+                 dbfWriter = new DBFWriter(ShpFiles.openWriteChannel(tempFiles.getDbf(true), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING));
+                 shxWriter = new IndexWriter(ShpFiles.openWriteChannel(tempFiles.getShx(true), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING));
+                 shpWriter.write(shpHeader);
+                 shxWriter.write(shpHeader);
+                 dbfWriter.write(dbfHeader);
+             } catch (IOException ex) {
+                 try {
+                     tempFiles.deleteFiles();
+                 } catch (IOException e) {
+                     ex.addSuppressed(e);
+                 }
+                 throw new DataStoreException("Failed to create temp files", ex);
+             }
+
+         }
+
-         private void write(Feature feature) throws IOException {
++        private void write(AbstractFeature feature) throws IOException {
+             inc++; //number starts at 1
+             final ShapeRecord shpRecord = new ShapeRecord();
+             final DBFRecord dbfRecord = new DBFRecord();
+             final long recordStartPosition = shpWriter.getSteamPosition();
+
+             if (defaultGeomName == null) {
+                 //search for the geometry name
-                 for (PropertyType pt : feature.getType().getProperties(true)) {
-                     if (pt instanceof AttributeType) {
-                         final AttributeType at = (AttributeType) pt;
++                for (AbstractIdentifiedType pt : feature.getType().getProperties(true)) {
++                    if (pt instanceof DefaultAttributeType) {
++                        final DefaultAttributeType at = (DefaultAttributeType) pt;
+                         final String attName = at.getName().toString();
+                         if (Geometry.class.isAssignableFrom(at.getValueClass())) {
+                             defaultGeomName = attName;
+                         }
+                     }
+                 }
+                 if (defaultGeomName == null) {
+                     throw new IOException("Failed to find a geometry attribute in given features");
+                 }
+             }
+
+             //write geometry
+             Object value = feature.getPropertyValue(defaultGeomName);
+             if (value instanceof Geometry) {
+                 shpRecord.geometry = (Geometry) value;
+                 shpRecord.recordNumber = inc;
+             } else {
+                 throw new IOException("Feature geometry property is not a geometry");
+             }
+             shpWriter.write(shpRecord);
+             final long recordEndPosition = shpWriter.getSteamPosition();
+
+             //write index
+             shxWriter.write(Math.toIntExact(recordStartPosition), Math.toIntExact(recordEndPosition - recordStartPosition));
+
+             //copy dbf fields
+             dbfRecord.fields = new Object[dbfHeader.fields.length];
+             for (int i = 0; i < dbfRecord.fields.length; i++) {
+                 dbfRecord.fields[i] = feature.getPropertyValue(dbfHeader.fields[i].fieldName);
+             }
+             dbfWriter.write(dbfRecord);
+         }
+
+         /**
+          * Close file writers and replace original files if true.
+          */
+         private void finish(boolean replaceOriginals) throws IOException {
+             try {
+                 shpWriter.close();
+                 dbfWriter.close();
+                 shxWriter.close();
+                 tempFiles.scan();
+                 if (replaceOriginals) {
+                     lock.writeLock().lock();
+                     try {
+                         //swap files
+                         tempFiles.replace(files);
+                     } finally {
+                         lock.writeLock().unlock();
+                     }
+                 }
+             } finally {
+                 tempFiles.deleteFiles();
+             }
+         }
+     }
+ }
diff --cc incubator/src/org.apache.sis.storage.shapefile/test/org/apache/sis/storage/shapefile/ShapefileStoreTest.java
index f2c3c8ff0d,6fd3fcf34c..b2676508bc
--- a/incubator/src/org.apache.sis.storage.shapefile/test/org/apache/sis/storage/shapefile/ShapefileStoreTest.java
+++ b/incubator/src/org.apache.sis.storage.shapefile/test/org/apache/sis/storage/shapefile/ShapefileStoreTest.java
@@@ -181,10 -192,54 +191,54 @@@ public class ShapefileStoreTest
      /**
       * Test creating a new shapefile.
       */
-     @Ignore
      @Test
-     public void testCreate() throws URISyntaxException, DataStoreException {
-         //todo
+     public void testCreate() throws URISyntaxException, DataStoreException, IOException {
+         final Path temp = Files.createTempFile("test", ".shp");
+         Files.delete(temp);
+         final String name = temp.getFileName().toString().split("\\.")[0];
+         try (final ShapefileStore store = new ShapefileStore(temp)) {
+             Path[] componentFiles = store.getComponentFiles();
+             assertEquals(0, componentFiles.length);
+
+             {//create type
-                 final FeatureType type = createType();
++                final DefaultFeatureType type = createType();
+                 store.updateType(type);
+             }
+
+             {//check files have been created
+                 componentFiles = store.getComponentFiles();
+                 assertEquals(5, componentFiles.length);
+                 assertTrue(componentFiles[0].toString().endsWith(name+".shp"));
+                 assertTrue(componentFiles[1].toString().endsWith(name+".shx"));
+                 assertTrue(componentFiles[2].toString().endsWith(name+".dbf"));
+                 assertTrue( componentFiles[3].toString().endsWith(name+".prj"));
+                 assertTrue(componentFiles[4].toString().endsWith(name+".cpg"));
+             }
+
+             {// check created type
-                 FeatureType type = store.getType();
++                DefaultFeatureType type = store.getType();
+                 assertEquals(name, type.getName().toString());
+                 System.out.println(type.toString());
+                 assertEquals(9, type.getProperties(true).size());
+                 assertNotNull(type.getProperty("sis:identifier"));
+                 assertNotNull(type.getProperty("sis:envelope"));
+                 assertNotNull(type.getProperty("sis:geometry"));
-                 final var geomProp = (AttributeType) type.getProperty("geometry");
-                 final var idProp = (AttributeType) type.getProperty("id");
-                 final var textProp = (AttributeType) type.getProperty("text");
-                 final var integerProp = (AttributeType) type.getProperty("integer");
-                 final var floatProp = (AttributeType) type.getProperty("float");
-                 final var dateProp = (AttributeType) type.getProperty("date");
-                 final AttributeType crsChar = (AttributeType) geomProp.characteristics().get(AttributeConvention.CRS);
++                final var geomProp = (DefaultAttributeType) type.getProperty("geometry");
++                final var idProp = (DefaultAttributeType) type.getProperty("id");
++                final var textProp = (DefaultAttributeType) type.getProperty("text");
++                final var integerProp = (DefaultAttributeType) type.getProperty("integer");
++                final var floatProp = (DefaultAttributeType) type.getProperty("float");
++                final var dateProp = (DefaultAttributeType) type.getProperty("date");
++                final DefaultAttributeType crsChar = (DefaultAttributeType) geomProp.characteristics().get(AttributeConvention.CRS);
+                 assertTrue(Utilities.equalsIgnoreMetadata(CommonCRS.WGS84.geographic(),crsChar.getDefaultValue()));
+                 assertEquals(Point.class, geomProp.getValueClass());
+                 assertEquals(Integer.class, idProp.getValueClass());
+                 assertEquals(String.class, textProp.getValueClass());
+                 assertEquals(Integer.class, integerProp.getValueClass());
+                 assertEquals(Double.class, floatProp.getValueClass());
+                 assertEquals(LocalDate.class, dateProp.getValueClass());
+             }
+         }
      }
      /**
@@@ -192,8 -247,22 +246,22 @@@
       */
      @Ignore
      @Test
-     public void testAddFeatures() throws URISyntaxException, DataStoreException {
-         //todo
+     public void testAddFeatures() throws URISyntaxException, DataStoreException, IOException {
+         final Path temp = Files.createTempFile("test", ".shp");
+         Files.delete(temp);
+         try (final ShapefileStore store = new ShapefileStore(temp)) {
-             FeatureType type = createType();
++            DefaultFeatureType type = createType();
+             store.updateType(type);
+             type = store.getType();
+
-             Feature feature1 = createFeature1(type);
-             Feature feature2 = createFeature2(type);
++            AbstractFeature feature1 = createFeature1(type);
++            AbstractFeature feature2 = createFeature2(type);
+             store.add(List.of(feature1, feature2).iterator());
+
+             Object[] result = store.features(false).toArray();
+
+
+         }
      }
      /**
@@@ -214,4 -283,38 +282,38 @@@
          //todo
      }
-     private static FeatureType createType() {
++    private static DefaultFeatureType createType() {
+         final FeatureTypeBuilder ftb = new FeatureTypeBuilder();
+         ftb.setName("test");
+         ftb.addAttribute(Integer.class).setName("id");
+         ftb.addAttribute(String.class).setName("text");
+         ftb.addAttribute(Integer.class).setName("integer");
+         ftb.addAttribute(Float.class).setName("float");
+         ftb.addAttribute(LocalDate.class).setName("date");
+         ftb.addAttribute(Point.class).setName("geometry").setCRS(CommonCRS.WGS84.geographic());
+         return ftb.build();
+     }
+
-     private static Feature createFeature1(FeatureType type) {
-         Feature feature = type.newInstance();
++    private static AbstractFeature createFeature1(DefaultFeatureType type) {
++        AbstractFeature feature = type.newInstance();
+         feature.setPropertyValue("geometry", GF.createPoint(new Coordinate(10,20)));
+         feature.setPropertyValue("id", 1);
+         feature.setPropertyValue("text", "some text 1");
+         feature.setPropertyValue("integer", 123);
+         feature.setPropertyValue("float", 123.456);
+         feature.setPropertyValue("date", LocalDate.of(2023, 5, 12));
+         return feature;
+     }
+
-     private static Feature createFeature2(FeatureType type) {
-         Feature feature = type.newInstance();
++    private static AbstractFeature createFeature2(DefaultFeatureType type) {
++        AbstractFeature feature = type.newInstance();
+         feature.setPropertyValue("geometry", GF.createPoint(new Coordinate(30,40)));
+         feature.setPropertyValue("id", 2);
+         feature.setPropertyValue("text", "some text 2");
+         feature.setPropertyValue("integer", 456);
+         feature.setPropertyValue("float", 456.789);
+         feature.setPropertyValue("date", LocalDate.of(2030, 6, 21));
+         return feature;
+     }
+ }
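
For context on the "partial work on Shapefile writer" mentioned in the commit message, the new ShapefileStoreTest cases above exercise the write path roughly as in the sketch below. This is an illustrative sketch only, assuming the incubator API as it appears in this diff (ShapefileStore, updateType, add); the class name ShapefileWriteSketch and the "demo"/"text" names are made up for the example and are not part of the commit:

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.List;
    import org.apache.sis.feature.builder.FeatureTypeBuilder;
    import org.apache.sis.referencing.CommonCRS;
    import org.apache.sis.storage.shapefile.ShapefileStore;
    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.Point;

    public class ShapefileWriteSketch {
        public static void main(String[] args) throws Exception {
            // updateType() is only allowed while the shapefile does not exist yet,
            // so start from an unused path.
            Path path = Files.createTempFile("demo", ".shp");
            Files.delete(path);
            try (ShapefileStore store = new ShapefileStore(path)) {
                // 1) Define the schema; this creates the .shp, .shx, .dbf, .prj and .cpg files.
                FeatureTypeBuilder ftb = new FeatureTypeBuilder();
                ftb.setName("demo");
                ftb.addAttribute(String.class).setName("text");
                ftb.addAttribute(Point.class).setName("geometry").setCRS(CommonCRS.WGS84.geographic());
                store.updateType(ftb.build());

                // 2) Append features; add() streams them through the new Writer into
                //    temporary files and swaps them with the originals on success.
                var feature = store.getType().newInstance();
                var gf = new GeometryFactory();
                feature.setPropertyValue("text", "hello");
                feature.setPropertyValue("geometry", gf.createPoint(new Coordinate(10, 20)));
                store.add(List.of(feature).iterator());
            }
        }
    }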