diff --git a/api/bag.cpp b/api/bag.cpp index 66ec97b61b..e2623a58d0 100644 --- a/api/bag.cpp +++ b/api/bag.cpp @@ -824,7 +824,7 @@ BagError bagGetErrorString( strncpy(str, "Metadata One or more elements of the requested coverage are missing from the XML file", MAX_STR-1); break; case BAG_METADTA_INVLID_DIMENSIONS: - sprintf(str, "Metadata The number of dimensions is incorrect (not equal to %d)", RANK); + snprintf(str, MAX_STR, "Metadata The number of dimensions is incorrect (not equal to %d)", RANK); break; case BAG_METADTA_BUFFER_EXCEEDED: strncpy(str, "Metadata supplied buffer is too large to be stored in the internal array", MAX_STR-1); @@ -866,7 +866,7 @@ BagError bagGetErrorString( strncpy(str, "HDF Bag is not an HDF5 File", MAX_STR-1); break; case BAG_HDF_RANK_INCOMPATIBLE: - sprintf(str, "HDF Bag's rank is incompatible with expected Rank of the Datasets: %d", RANK); + snprintf(str, MAX_STR, "HDF Bag's rank is incompatible with expected Rank of the Datasets: %d", RANK); break; case BAG_HDF_TYPE_NOT_FOUND: strncpy(str, "HDF Bag surface Datatype parameter not available", MAX_STR-1); diff --git a/api/bag_dataset.cpp b/api/bag_dataset.cpp index ab04c9276b..c92ce2c615 100644 --- a/api/bag_dataset.cpp +++ b/api/bag_dataset.cpp @@ -501,12 +501,12 @@ void Dataset::createDataset( // Mandatory Layers // Elevation - this->addLayer(SimpleLayer::create(*this, Elevation, chunkSize, - compressionLevel)); + this->addLayer(SimpleLayer::create(*this, Elevation, m_pMetadata->rows(), m_pMetadata->columns(), + chunkSize, compressionLevel)); // Uncertainty - this->addLayer(SimpleLayer::create(*this, Uncertainty, chunkSize, - compressionLevel)); + this->addLayer(SimpleLayer::create(*this, Uncertainty, m_pMetadata->rows(), m_pMetadata->columns(), + chunkSize, compressionLevel)); } //! Create an optional simple layer. 
@@ -545,8 +545,8 @@ Layer& Dataset::createSimpleLayer( case Num_Soundings: //[[fallthrough]]; case Average_Elevation: //[[fallthrough]]; case Nominal_Elevation: - return this->addLayer(SimpleLayer::create(*this, type, chunkSize, - compressionLevel)); + return this->addLayer(SimpleLayer::create(*this, type, + m_pMetadata->rows(), m_pMetadata->columns(), chunkSize, compressionLevel)); case Surface_Correction: //[[fallthrough]]; case Georef_Metadata: //[[fallthrough]]; default: @@ -1096,7 +1096,10 @@ void Dataset::readDataset( H5Dclose(id); - auto layerDesc = SimpleLayerDescriptor::open(*this, layerType); + // Pre-stage the layer-specific descriptor. Note that we don't need to specify the + // dimensions of the layer here, since they're set from the HDF5 dataset when it + // gets opened with SimpleLayer::open(). + auto layerDesc = SimpleLayerDescriptor::open(*this, layerType, 0, 0); this->addLayer(SimpleLayer::open(*this, *layerDesc)); } @@ -1159,7 +1162,10 @@ void Dataset::readDataset( } { - auto descriptor = VRRefinementsDescriptor::open(*this); + // Pre-stage the layer-specific descriptor for the refinements; note that this + // doesn't have to have specific dimensions since they're set when the refinements + // layer is read in VRRefinements::open(). + auto descriptor = VRRefinementsDescriptor::open(*this, 0, 0); this->addLayer(VRRefinements::open(*this, *descriptor)); } @@ -1169,7 +1175,10 @@ void Dataset::readDataset( { H5Dclose(id); - auto descriptor = VRNodeDescriptor::open(*this); + // Pre-stage the layer-specific descriptor for the nodes; note that this doesn't + // have to have specific dimensions since they're set when the nodes layer is + // read in VRNode::open(). 
+ auto descriptor = VRNodeDescriptor::open(*this, 0, 0); this->addLayer(VRNode::open(*this, *descriptor)); } } diff --git a/api/bag_georefmetadatalayer.cpp b/api/bag_georefmetadatalayer.cpp index bde3e0e66e..0eefbf499b 100644 --- a/api/bag_georefmetadatalayer.cpp +++ b/api/bag_georefmetadatalayer.cpp @@ -104,8 +104,13 @@ std::shared_ptr GeorefMetadataLayer::create( keyType != DT_UINT64) throw InvalidKeyType{}; + // The keys array should be the same dimensions as the mandatory elevation layer, so read + // from the file global descriptor, and set. + uint32_t rows = 0, cols = 0; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); auto pDescriptor = GeorefMetadataLayerDescriptor::create(dataset, name, profile, keyType, - definition, chunkSize, compressionLevel); + definition, rows, cols, + chunkSize, compressionLevel); // Create the H5 Group to hold keys & values. const auto& h5file = dataset.getH5file(); @@ -122,7 +127,8 @@ std::shared_ptr GeorefMetadataLayer::create( auto h5valueDataSet = GeorefMetadataLayer::createH5valueDataSet(dataset, *pDescriptor); auto layer = std::make_shared(dataset, - *pDescriptor, std::move(h5keyDataSet), std::move(h5vrKeyDataSet), + *pDescriptor, std::move(h5keyDataSet), + std::move(h5vrKeyDataSet), std::move(h5valueDataSet)); layer->setValueTable(std::unique_ptr(new ValueTable{*layer})); @@ -150,6 +156,12 @@ std::shared_ptr GeorefMetadataLayer::open( new ::H5::DataSet{h5file.openDataSet(internalPath + COMPOUND_KEYS)}, DeleteH5dataSet{}); + // The keys array has the dimensions of the layer, so we can read and reset the + // descriptor dimensions, in case they were inconsistent (or not set). 
+ std::array dims; + h5keyDataSet->getSpace().getSimpleExtentDims(dims.data(), nullptr); + descriptor.setDims(dims[0], dims[1]); + std::unique_ptr<::H5::DataSet, DeleteH5dataSet> h5vrKeyDataSet{}; if (dataset.getVRMetadata()) h5vrKeyDataSet = std::unique_ptr<::H5::DataSet, DeleteH5dataSet>( @@ -161,7 +173,9 @@ std::shared_ptr GeorefMetadataLayer::open( DeleteH5dataSet{}); auto layer = std::make_shared(dataset, - descriptor, std::move(h5keyDataSet), std::move(h5vrKeyDataSet), + descriptor, + std::move(h5keyDataSet), + std::move(h5vrKeyDataSet), std::move(h5valueDataSet)); layer->setValueTable(std::unique_ptr(new ValueTable{*layer})); @@ -188,7 +202,9 @@ GeorefMetadataLayer::createH5keyDataSet( std::unique_ptr<::H5::DataSet, DeleteH5dataSet> pH5dataSet; { - // Use the dimensions from the descriptor. + // Use the dimensions from the descriptor. We could do this from the specific + // descriptor for the layer, too, which should mirror the size of the file global + // descriptor used here. uint32_t dim0 = 0, dim1 = 0; std::tie(dim0, dim1) = dataset.getDescriptor().getDims(); const std::array fileDims{dim0, dim1}; diff --git a/api/bag_georefmetadatalayer.h b/api/bag_georefmetadatalayer.h index 32f940f371..faceb66d78 100644 --- a/api/bag_georefmetadatalayer.h +++ b/api/bag_georefmetadatalayer.h @@ -64,8 +64,8 @@ class BAG_API GeorefMetadataLayer final : public Layer protected: static std::shared_ptr create(DataType keyType, const std::string& name, GeorefMetadataProfile profile, Dataset& dataset, - const RecordDefinition& definition, uint64_t chunkSize, - int compressionLevel); + const RecordDefinition& definition, + uint64_t chunkSize, int compressionLevel); static std::shared_ptr open(Dataset& dataset, GeorefMetadataLayerDescriptor& descriptor); diff --git a/api/bag_georefmetadatalayerdescriptor.cpp b/api/bag_georefmetadatalayerdescriptor.cpp index 527d0fd1a9..9a9bd0e69d 100644 --- a/api/bag_georefmetadatalayerdescriptor.cpp +++ 
b/api/bag_georefmetadatalayerdescriptor.cpp @@ -35,10 +35,11 @@ GeorefMetadataLayerDescriptor::GeorefMetadataLayerDescriptor( GeorefMetadataProfile profile, DataType keyType, RecordDefinition definition, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(dataset.getNextId(), GEOREF_METADATA_PATH + name, name, - Georef_Metadata, chunkSize, compressionLevel) + Georef_Metadata, rows, cols, chunkSize, compressionLevel) , m_pBagDataset(dataset.shared_from_this()) , m_profile(profile) , m_keyType(keyType) @@ -72,12 +73,14 @@ std::shared_ptr GeorefMetadataLayerDescriptor::cr GeorefMetadataProfile profile, DataType keyType, RecordDefinition definition, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) { return std::shared_ptr( new GeorefMetadataLayerDescriptor{dataset, name, profile, keyType, - std::move(definition), chunkSize, compressionLevel}); + std::move(definition), rows, cols, + chunkSize, compressionLevel}); } //! Open an existing georeferenced metadata layer descriptor. 
@@ -165,8 +168,13 @@ std::shared_ptr GeorefMetadataLayerDescriptor::op profile = UNKNOWN_METADATA_PROFILE; } + std::array dims; + h5dataSet.getSpace().getSimpleExtentDims(dims.data(), nullptr); + return std::shared_ptr( new GeorefMetadataLayerDescriptor{dataset, name, profile, keyType, definition, + static_cast(dims[0]), + static_cast(dims[1]), chunkSize, compressionLevel}); } diff --git a/api/bag_georefmetadatalayerdescriptor.h b/api/bag_georefmetadatalayerdescriptor.h index 6749d09568..bc3d029037 100644 --- a/api/bag_georefmetadatalayerdescriptor.h +++ b/api/bag_georefmetadatalayerdescriptor.h @@ -21,12 +21,13 @@ namespace BAG { class BAG_API GeorefMetadataLayerDescriptor final : public LayerDescriptor { public: - static std::shared_ptr create(Dataset& dataset, - const std::string& name, GeorefMetadataProfile profile, DataType keyType, - RecordDefinition definition, uint64_t chunkSize, - int compressionLevel); - static std::shared_ptr open(Dataset& dataset, - const std::string& name); + static std::shared_ptr + create(Dataset& dataset, + const std::string& name, GeorefMetadataProfile profile, DataType keyType, + RecordDefinition definition, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); + static std::shared_ptr + open(Dataset& dataset, const std::string& name); GeorefMetadataLayerDescriptor(const GeorefMetadataLayerDescriptor&) = delete; GeorefMetadataLayerDescriptor(GeorefMetadataLayerDescriptor&&) = delete; @@ -52,7 +53,8 @@ class BAG_API GeorefMetadataLayerDescriptor final : public LayerDescriptor protected: GeorefMetadataLayerDescriptor(Dataset& dataset, const std::string& name, GeorefMetadataProfile profile, - DataType keyType, RecordDefinition definition, uint64_t chunkSize, + DataType keyType, RecordDefinition definition, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); private: diff --git a/api/bag_interleavedlegacylayer.cpp b/api/bag_interleavedlegacylayer.cpp index d18e6f3fd7..c4e7124197 100644 --- 
a/api/bag_interleavedlegacylayer.cpp +++ b/api/bag_interleavedlegacylayer.cpp @@ -51,6 +51,10 @@ std::shared_ptr InterleavedLegacyLayer::open( descriptor.setMinMax(std::get<1>(possibleMinMax), std::get<2>(possibleMinMax)); + std::array dims; + h5dataSet->getSpace().getSimpleExtentDims(dims.data(), nullptr); + descriptor.setDims(dims[0], dims[1]); + return std::make_shared(dataset, descriptor, std::move(h5dataSet)); } diff --git a/api/bag_interleavedlegacylayerdescriptor.cpp b/api/bag_interleavedlegacylayerdescriptor.cpp index 179d5685f8..1567ad2df0 100644 --- a/api/bag_interleavedlegacylayerdescriptor.cpp +++ b/api/bag_interleavedlegacylayerdescriptor.cpp @@ -20,9 +20,10 @@ namespace BAG { InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor( uint32_t id, LayerType layerType, - GroupType groupType) + GroupType groupType, + uint32_t rows, uint32_t cols) : LayerDescriptor(id, Layer::getInternalPath(layerType), - kLayerTypeMapString.at(layerType), layerType, 0, 0) + kLayerTypeMapString.at(layerType), layerType, rows, cols, 0, 0) , m_groupType(groupType) , m_elementSize(Layer::getElementSize(Layer::getDataType(layerType))) { @@ -45,8 +46,10 @@ InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor( InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor( const Dataset& dataset, LayerType layerType, - GroupType groupType) + GroupType groupType, + uint32_t rows, uint32_t cols) : LayerDescriptor(dataset, layerType, + rows, cols, groupType == NODE ? NODE_GROUP_PATH : groupType == ELEVATION @@ -76,9 +79,11 @@ std::shared_ptr InterleavedLegacyLayerDescript LayerType layerType, GroupType groupType) { + uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( new InterleavedLegacyLayerDescriptor{dataset.getNextId(), layerType, - groupType}); + groupType, rows, cols}); } //! Open an interleaved layer descriptor. 
@@ -99,8 +104,10 @@ std::shared_ptr InterleavedLegacyLayerDescript LayerType layerType, GroupType groupType) { + uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( - new InterleavedLegacyLayerDescriptor{dataset, layerType, groupType}); + new InterleavedLegacyLayerDescriptor{dataset, layerType, groupType, rows, cols}); } diff --git a/api/bag_interleavedlegacylayerdescriptor.h b/api/bag_interleavedlegacylayerdescriptor.h index 6e5fc9d642..1d5beeeb9f 100644 --- a/api/bag_interleavedlegacylayerdescriptor.h +++ b/api/bag_interleavedlegacylayerdescriptor.h @@ -43,9 +43,9 @@ class BAG_API InterleavedLegacyLayerDescriptor final : public LayerDescriptor protected: InterleavedLegacyLayerDescriptor(uint32_t id, LayerType layerType, - GroupType groupType); + GroupType groupType, uint32_t rows, uint32_t cols); InterleavedLegacyLayerDescriptor(const Dataset& dataset, LayerType layerType, - GroupType groupType); + GroupType groupType, uint32_t rows, uint32_t cols); private: static void validateTypes(LayerType layerType, GroupType groupType); diff --git a/api/bag_layer.cpp b/api/bag_layer.cpp index dbec860765..454ef7d78d 100644 --- a/api/bag_layer.cpp +++ b/api/bag_layer.cpp @@ -208,9 +208,8 @@ UInt8Array Layer::read( if (m_pBagDataset.expired()) throw DatasetNotFound{}; - const auto pDataset = m_pBagDataset.lock(); uint32_t numRows = 0, numColumns = 0; - std::tie(numRows, numColumns) = pDataset->getDescriptor().getDims(); + std::tie(numRows, numColumns) = m_pLayerDescriptor->getDims(); if (columnEnd >= numColumns || rowEnd >= numRows) throw InvalidReadSize{}; diff --git a/api/bag_layerdescriptor.cpp b/api/bag_layerdescriptor.cpp index 931d69e7b7..f67bd59384 100644 --- a/api/bag_layerdescriptor.cpp +++ b/api/bag_layerdescriptor.cpp @@ -30,6 +30,7 @@ LayerDescriptor::LayerDescriptor( std::string internalPath, std::string name, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : m_id(id) @@ 
-39,6 +40,7 @@ LayerDescriptor::LayerDescriptor( , m_compressionLevel(compressionLevel) , m_chunkSize(chunkSize) , m_minMax(std::numeric_limits::max(), std::numeric_limits::lowest()) + , m_dims({rows, cols}) { } @@ -56,11 +58,13 @@ LayerDescriptor::LayerDescriptor( LayerDescriptor::LayerDescriptor( const Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols, std::string internalPath, std::string name) : m_id(dataset.getNextId()) , m_layerType(type) , m_minMax(std::numeric_limits::max(), std::numeric_limits::lowest()) + , m_dims({rows, cols}) { m_internalPath = internalPath.empty() ? Layer::getInternalPath(type) @@ -169,6 +173,16 @@ const std::string& LayerDescriptor::getName() const & noexcept { return m_name; } +//! Retrieve the dimensions (shape) of the layer +/*! +\return + The number of rows and columns of the layer +*/ +const std::tuple& LayerDescriptor::getDims() const & noexcept +{ + return m_dims; +} + //! Get the size of a buffer for reading a specified number rows and columns. /*! \param rows @@ -204,6 +218,12 @@ LayerDescriptor& LayerDescriptor::setMinMax( return *this; } +LayerDescriptor& LayerDescriptor::setDims(uint32_t rows, uint32_t cols) & noexcept +{ + m_dims = {rows, cols}; + return *this; +} + //! Set the HDF5 path of the layer. /*! 
\param inPath diff --git a/api/bag_layerdescriptor.h b/api/bag_layerdescriptor.h index 98ea835a8d..eb7e506747 100644 --- a/api/bag_layerdescriptor.h +++ b/api/bag_layerdescriptor.h @@ -53,14 +53,18 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this getMinMax() const noexcept; const std::string& getName() const & noexcept; + const std::tuple& getDims() const & noexcept; LayerDescriptor& setName(std::string inName) & noexcept; LayerDescriptor& setMinMax(float min, float max) & noexcept; + LayerDescriptor& setDims(uint32_t rows, uint32_t cols) & noexcept; protected: LayerDescriptor(uint32_t id, std::string internalPath, std::string name, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint32_t rows, uint32_t cols, uint64_t chunkSize, + int compressionLevel); LayerDescriptor(const Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols, std::string internalPath = {}, std::string name = {}); size_t getReadBufferSize(uint32_t rows, uint32_t columns) const noexcept; @@ -85,6 +89,8 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this m_minMax{}; + //! 
The dimensions of the layer + std::tuple m_dims{}; friend GeorefMetadataLayer; friend InterleavedLegacyLayer; diff --git a/api/bag_metadata_export.cpp b/api/bag_metadata_export.cpp index 45fc3ec2f4..50087c87e9 100644 --- a/api/bag_metadata_export.cpp +++ b/api/bag_metadata_export.cpp @@ -993,7 +993,7 @@ bool addSpatialRepresentation(xmlNode &parentNode, const BagSpatialRepresentatio xmlSetProp(pPointNode, XMLCast("gml:id"), XMLCast("id1")); char pointsString[88]; - sprintf(pointsString, "%.12lf,%.12lf %.12lf,%.12lf", spatialRepresentationInfo.llCornerX, spatialRepresentationInfo.llCornerY, spatialRepresentationInfo.urCornerX, spatialRepresentationInfo.urCornerY); + snprintf(pointsString, 88, "%.12lf,%.12lf %.12lf,%.12lf", spatialRepresentationInfo.llCornerX, spatialRepresentationInfo.llCornerY, spatialRepresentationInfo.urCornerX, spatialRepresentationInfo.urCornerY); xmlNode *pCoordNode = xmlNewChild(pPointNode, pGmlNamespace, XMLCast("coordinates"), EncodedString(*parentNode.doc, pointsString)); xmlSetProp(pCoordNode, XMLCast("decimal"), XMLCast(".")); diff --git a/api/bag_metadata_import.cpp b/api/bag_metadata_import.cpp index 69cfa6df8a..20c15875f1 100644 --- a/api/bag_metadata_import.cpp +++ b/api/bag_metadata_import.cpp @@ -1247,7 +1247,7 @@ bool decodeReferenceSystemInfoFromSpatial( { char buffer[2048]; - sprintf(buffer, "%d", epsg); + snprintf(buffer, 2048, "%d", epsg); referenceSystemInfo->definition = copyString(buffer); referenceSystemInfo->type = copyString("EPSG"); diff --git a/api/bag_simplelayer.cpp b/api/bag_simplelayer.cpp index 56b059d6ce..b5d6d5846b 100644 --- a/api/bag_simplelayer.cpp +++ b/api/bag_simplelayer.cpp @@ -46,11 +46,11 @@ SimpleLayer::SimpleLayer( std::shared_ptr SimpleLayer::create( Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) { - auto descriptor = SimpleLayerDescriptor::create(dataset, type, chunkSize, - compressionLevel); + auto descriptor = 
SimpleLayerDescriptor::create(dataset, type, rows, cols, chunkSize, compressionLevel); auto h5dataSet = SimpleLayer::createH5dataSet(dataset, *descriptor); return std::make_shared(dataset, *descriptor, std::move(h5dataSet)); @@ -74,6 +74,12 @@ std::shared_ptr SimpleLayer::open( auto h5dataSet = std::unique_ptr<::H5::DataSet, DeleteH5dataSet>( new ::H5::DataSet{h5file.openDataSet(descriptor.getInternalPath())}, DeleteH5dataSet{}); + + // Configure the layer dimensions in the descriptor (we implicitly expect the layer + // to be two-dimensional) + hsize_t dims[2]; + h5dataSet->getSpace().getSimpleExtentDims(dims); + descriptor.setDims(dims[0], dims[1]); // Read the min/max attribute values. const auto possibleMinMax = dataset.getMinMax(descriptor.getLayerType()); @@ -101,7 +107,7 @@ SimpleLayer::createH5dataSet( const SimpleLayerDescriptor& descriptor) { uint32_t dim0 = 0, dim1 = 0; - std::tie(dim0, dim1) = dataset.getDescriptor().getDims(); + std::tie(dim0, dim1) = descriptor.getDims(); const std::array fileDims{dim0, dim1}; ::H5::DataSpace h5dataSpace{kRank, fileDims.data(), fileDims.data()}; diff --git a/api/bag_simplelayer.h b/api/bag_simplelayer.h index 6925f4eeba..4d280c0420 100644 --- a/api/bag_simplelayer.h +++ b/api/bag_simplelayer.h @@ -46,7 +46,7 @@ class BAG_API SimpleLayer final : public Layer protected: static std::shared_ptr create(Dataset& dataset, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); static std::shared_ptr open(Dataset& dataset, SimpleLayerDescriptor& descriptor); diff --git a/api/bag_simplelayerdescriptor.cpp b/api/bag_simplelayerdescriptor.cpp index ff266f1d26..2e36968490 100644 --- a/api/bag_simplelayerdescriptor.cpp +++ b/api/bag_simplelayerdescriptor.cpp @@ -19,10 +19,11 @@ namespace BAG { SimpleLayerDescriptor::SimpleLayerDescriptor( uint32_t id, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int 
compressionLevel) : LayerDescriptor(id, Layer::getInternalPath(type), - kLayerTypeMapString.at(type), type, chunkSize, compressionLevel) + kLayerTypeMapString.at(type), type, rows, cols, chunkSize, compressionLevel) , m_elementSize(Layer::getElementSize(Layer::getDataType(type))) { } @@ -36,8 +37,9 @@ SimpleLayerDescriptor::SimpleLayerDescriptor( */ SimpleLayerDescriptor::SimpleLayerDescriptor( const Dataset& dataset, - LayerType type) - : LayerDescriptor(dataset, type) + LayerType type, + uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, type, rows, cols) , m_elementSize(Layer::getElementSize(Layer::getDataType(type))) { } @@ -59,11 +61,12 @@ SimpleLayerDescriptor::SimpleLayerDescriptor( std::shared_ptr SimpleLayerDescriptor::create( const Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) { return std::shared_ptr( - new SimpleLayerDescriptor{dataset.getNextId(), type, chunkSize, + new SimpleLayerDescriptor{dataset.getNextId(), type, rows, cols, chunkSize, compressionLevel}); } @@ -79,10 +82,10 @@ std::shared_ptr SimpleLayerDescriptor::create( */ std::shared_ptr SimpleLayerDescriptor::open( const Dataset& dataset, - LayerType type) + LayerType type, uint32_t rows, uint32_t cols) { return std::shared_ptr( - new SimpleLayerDescriptor{dataset, type}); + new SimpleLayerDescriptor{dataset, type, rows, cols}); } diff --git a/api/bag_simplelayerdescriptor.h b/api/bag_simplelayerdescriptor.h index b784f398fd..ca48599777 100644 --- a/api/bag_simplelayerdescriptor.h +++ b/api/bag_simplelayerdescriptor.h @@ -16,10 +16,11 @@ class BAG_API SimpleLayerDescriptor final : public LayerDescriptor { public: static std::shared_ptr create(const Dataset& dataset, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); static std::shared_ptr open(const Dataset& dataset, - LayerType type); + LayerType type, uint32_t rows, 
uint32_t cols); SimpleLayerDescriptor(const SimpleLayerDescriptor&) = delete; SimpleLayerDescriptor(SimpleLayerDescriptor&&) = delete; @@ -36,9 +37,11 @@ class BAG_API SimpleLayerDescriptor final : public LayerDescriptor } protected: - SimpleLayerDescriptor(uint32_t id, LayerType type, uint64_t chunkSize, + SimpleLayerDescriptor(uint32_t id, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); - SimpleLayerDescriptor(const Dataset& dataset, LayerType type); + SimpleLayerDescriptor(const Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols); private: DataType getDataTypeProxy() const noexcept override; diff --git a/api/bag_surfacecorrections.cpp b/api/bag_surfacecorrections.cpp index 7b6f4c912e..10be24b4ac 100644 --- a/api/bag_surfacecorrections.cpp +++ b/api/bag_surfacecorrections.cpp @@ -358,7 +358,7 @@ UInt8Array SurfaceCorrections::readCorrectedRow( --corrector; // This is 0 based when used. auto originalRow = layer.read(row, row, columnStart, columnEnd); - auto* data = reinterpret_cast(originalRow.data()); + auto data = reinterpret_cast(originalRow.data()); // Obtain cell resolution and SW origin (0,1,1,0). 
double swCornerX = 0., swCornerY = 0.; diff --git a/api/bag_surfacecorrectionsdescriptor.cpp b/api/bag_surfacecorrectionsdescriptor.cpp index 6e1057cb22..2039d299dd 100644 --- a/api/bag_surfacecorrectionsdescriptor.cpp +++ b/api/bag_surfacecorrectionsdescriptor.cpp @@ -54,7 +54,7 @@ SurfaceCorrectionsDescriptor::SurfaceCorrectionsDescriptor( int compressionLevel) : LayerDescriptor(id, Layer::getInternalPath(Surface_Correction), kLayerTypeMapString.at(Surface_Correction), Surface_Correction, - chunkSize, compressionLevel) + 0, 0, chunkSize, compressionLevel) // Dims default to 0,0 like derived type , m_surfaceType(type) , m_elementSize(BAG::getElementSize(type)) , m_numCorrectors(numCorrectors) @@ -68,7 +68,7 @@ SurfaceCorrectionsDescriptor::SurfaceCorrectionsDescriptor( */ SurfaceCorrectionsDescriptor::SurfaceCorrectionsDescriptor( const Dataset& dataset) - : LayerDescriptor(dataset, Surface_Correction) + : LayerDescriptor(dataset, Surface_Correction, 0, 0) // Dims set in body { const auto h5dataSet = dataset.getH5file().openDataSet( Layer::getInternalPath(Surface_Correction)); @@ -279,6 +279,7 @@ SurfaceCorrectionsDescriptor& SurfaceCorrectionsDescriptor::setDims( { m_numRows = numRows; m_numColumns = numColumns; + LayerDescriptor::setDims(numRows, numColumns); return *this; } diff --git a/api/bag_vrmetadata.cpp b/api/bag_vrmetadata.cpp index 3c77c222fb..73dda33d3e 100644 --- a/api/bag_vrmetadata.cpp +++ b/api/bag_vrmetadata.cpp @@ -373,6 +373,12 @@ void VRMetadata::writeProxy( auto pDataset = this->getDataset().lock(); pDataset->getDescriptor().setDims(static_cast(newDims[0]), static_cast(newDims[1])); + // The file descriptor is global (and the size of the mandatory layers) and specified + // in the metadata; each layer has its own size, however, which we need to update. 
In + // this case, the VRMetadataDescriptor has the same dimensions as the mandatory layer + // (since there should be a refinement for each fixed-resolution cell), so it's formally + // redundant. But we want to make sure that it's consistent, so ... + pDescriptor->setDims(fileDims[0], fileDims[1]); } fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); diff --git a/api/bag_vrmetadatadescriptor.cpp b/api/bag_vrmetadatadescriptor.cpp index 20aca27cec..4c64885f7c 100644 --- a/api/bag_vrmetadatadescriptor.cpp +++ b/api/bag_vrmetadatadescriptor.cpp @@ -17,11 +17,12 @@ namespace BAG { */ VRMetadataDescriptor::VRMetadataDescriptor( uint32_t id, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(id, VR_METADATA_PATH, - kLayerTypeMapString.at(VarRes_Metadata), VarRes_Metadata, chunkSize, - compressionLevel) + kLayerTypeMapString.at(VarRes_Metadata), VarRes_Metadata, + rows, cols, chunkSize, compressionLevel) { } @@ -31,8 +32,9 @@ VRMetadataDescriptor::VRMetadataDescriptor( The BAG Dataset this layer belongs to. */ VRMetadataDescriptor::VRMetadataDescriptor( - const Dataset& dataset) - : LayerDescriptor(dataset, VarRes_Metadata, VR_METADATA_PATH) + const Dataset& dataset, + uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, VarRes_Metadata, rows, cols, VR_METADATA_PATH) { } @@ -53,9 +55,15 @@ std::shared_ptr VRMetadataDescriptor::create( uint64_t chunkSize, int compressionLevel) { + // The VRMetadataLayer has the same dimensions as the overall BAG file + // (since there should be one element for each cell in the mandatory + // layers). Reading this from the dataset layer descriptor enforces this + // and keeps the call signature simpler. 
+ uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( - new VRMetadataDescriptor{dataset.getNextId(), chunkSize, - compressionLevel}); + new VRMetadataDescriptor{dataset.getNextId(), rows, cols, + chunkSize, compressionLevel}); } //! Open an existing variable resolution metadata descriptor. @@ -69,8 +77,14 @@ std::shared_ptr VRMetadataDescriptor::create( std::shared_ptr VRMetadataDescriptor::open( const Dataset& dataset) { + // The VRMetadataLayer has the same dimensions as the overall BAG file + // (since there should be one element for each cell in the mandatory + // layers). Reading this from the dataset layer descriptor enforces this + // and keeps the call signature simpler. + uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( - new VRMetadataDescriptor{dataset}); + new VRMetadataDescriptor{dataset, rows, cols}); } diff --git a/api/bag_vrmetadatadescriptor.h b/api/bag_vrmetadatadescriptor.h index 98718c0065..e661993b25 100644 --- a/api/bag_vrmetadatadescriptor.h +++ b/api/bag_vrmetadatadescriptor.h @@ -44,9 +44,9 @@ class BAG_API VRMetadataDescriptor final : public LayerDescriptor VRMetadataDescriptor& setMinResolution(float minResX, float minResY) & noexcept; protected: - VRMetadataDescriptor(uint32_t id, uint64_t chunkSize, + VRMetadataDescriptor(uint32_t id, uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); - explicit VRMetadataDescriptor(const Dataset& dataset); + explicit VRMetadataDescriptor(const Dataset& dataset, uint32_t rows, uint32_t cols); static std::shared_ptr create(const Dataset& dataset, uint64_t chunkSize, int compressionLevel); diff --git a/api/bag_vrnode.cpp b/api/bag_vrnode.cpp index dc9ba27a8c..0507d97317 100644 --- a/api/bag_vrnode.cpp +++ b/api/bag_vrnode.cpp @@ -166,11 +166,18 @@ std::shared_ptr VRNode::open( descriptor.setMinMaxNSamples(minNSamples, maxNSamples); auto h5dataSet = 
std::unique_ptr<::H5::DataSet, DeleteH5dataSet>( - new ::H5::DataSet{h5file.openDataSet(VR_REFINEMENT_PATH)}, + new ::H5::DataSet{h5file.openDataSet(VR_NODE_PATH)}, DeleteH5dataSet{}); - return std::make_unique(dataset, - descriptor, std::move(h5dataSet)); + // We need to know the dimensions of the array on file so that we can update the + // descriptor for the layer. + hsize_t dims[2]; // Should be 1D, but you never know ... + int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); + if (ndims != 2) { + throw InvalidVRRefinementDimensions{}; + } + descriptor.setDims(dims[0], dims[1]); + return std::make_unique(dataset, descriptor, std::move(h5dataSet)); } @@ -263,18 +270,20 @@ UInt8Array VRNode::readProxy( const hsize_t columns = (columnEnd - columnStart) + 1; const hsize_t offset = columnStart; - const auto fileDataSpace = m_pH5dataSet->getSpace(); - fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); + const std::array sizes{1, columns}; + const std::array offsets{0, offset}; - const auto bufferSize = pDescriptor->getReadBufferSize(1, - static_cast(columns)); + const auto h5fileDataSpace = m_pH5dataSet->getSpace(); + h5fileDataSpace.selectHyperslab(H5S_SELECT_SET, sizes.data(), offsets.data()); + + const auto bufferSize = pDescriptor->getReadBufferSize(1, columns); UInt8Array buffer{bufferSize}; - const ::H5::DataSpace memDataSpace{1, &columns, &columns}; + const ::H5::DataSpace memDataSpace{kRank, sizes.data(), sizes.data()}; const auto memDataType = makeDataType(); - m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, fileDataSpace); + m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, h5fileDataSpace); return buffer; } @@ -354,7 +363,14 @@ void VRNode::writeProxy( throw DatasetNotFound{}; auto pDataset = this->getDataset().lock(); + // TODO: Confirm that this is what we want --- this resets the dimensions of the + // overall BAG, rather than the layer, which means that it's going to set the + // metadata size of 
the mandatory layers to 1xN ... which is odd. pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); + // So that the read() call checks correctly against the size of the array, rather + // than the dimensions of the mandatory layer, we need to keep track of the size + // of the layer in the layer-specific descriptor. + pDescriptor->setDims(1, newMaxLength); } fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); diff --git a/api/bag_vrnodedescriptor.cpp b/api/bag_vrnodedescriptor.cpp index aecb0f22ff..2f932619b5 100644 --- a/api/bag_vrnodedescriptor.cpp +++ b/api/bag_vrnodedescriptor.cpp @@ -17,11 +17,13 @@ namespace BAG { */ VRNodeDescriptor::VRNodeDescriptor( uint32_t id, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(id, VR_NODE_PATH, - kLayerTypeMapString.at(VarRes_Node), VarRes_Node, chunkSize, - compressionLevel) + kLayerTypeMapString.at(VarRes_Node), VarRes_Node, + rows, cols, + chunkSize, compressionLevel) { } @@ -31,8 +33,9 @@ VRNodeDescriptor::VRNodeDescriptor( The BAG Dataset this layer belongs to. */ VRNodeDescriptor::VRNodeDescriptor( - const Dataset& dataset) - : LayerDescriptor(dataset, VarRes_Node, VR_NODE_PATH) + const Dataset& dataset, + uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, VarRes_Node, rows, cols, VR_NODE_PATH) { } @@ -55,7 +58,7 @@ std::shared_ptr VRNodeDescriptor::create( int compressionLevel) { return std::shared_ptr( - new VRNodeDescriptor{dataset.getNextId(), chunkSize, + new VRNodeDescriptor{dataset.getNextId(), 1, 0, chunkSize, compressionLevel}); } @@ -65,13 +68,12 @@ std::shared_ptr VRNodeDescriptor::create( The BAG Dataset this layer belongs to. */ std::shared_ptr VRNodeDescriptor::open( - const Dataset& dataset) + const Dataset& dataset, uint32_t rows, uint32_t cols) { return std::shared_ptr( - new VRNodeDescriptor{dataset}); + new VRNodeDescriptor{dataset, rows, cols}); } - //! 
\copydoc LayerDescriptor::getDataType DataType VRNodeDescriptor::getDataTypeProxy() const noexcept { diff --git a/api/bag_vrnodedescriptor.h b/api/bag_vrnodedescriptor.h index cd8fd526c9..8de0fe7ed5 100644 --- a/api/bag_vrnodedescriptor.h +++ b/api/bag_vrnodedescriptor.h @@ -43,14 +43,15 @@ class BAG_API VRNodeDescriptor final : public LayerDescriptor uint32_t maxNumHypotheses) & noexcept; protected: - VRNodeDescriptor(uint32_t id, uint64_t chunkSize, + VRNodeDescriptor(uint32_t id, uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); - explicit VRNodeDescriptor(const Dataset& dataset); + explicit VRNodeDescriptor(const Dataset& dataset, uint32_t rows, uint32_t cols); static std::shared_ptr create(const Dataset& dataset, uint64_t chunkSize, int compressionLevel); - static std::shared_ptr open(const Dataset& dataset); + static std::shared_ptr open(const Dataset& dataset, + uint32_t rows, uint32_t cols); private: DataType getDataTypeProxy() const noexcept override; diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 291412208d..3f2c7f211c 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -4,11 +4,11 @@ #include "bag_vrrefinements.h" #include "bag_vrrefinementsdescriptor.h" +#include #include #include //memset #include - namespace BAG { namespace { @@ -59,10 +59,10 @@ T readAttribute( The layer's descriptor. Will never be nullptr. */ - std::shared_ptr VRRefinements::getDescriptor() & noexcept - { - return std::dynamic_pointer_cast(Layer::getDescriptor()); - } +std::shared_ptr VRRefinements::getDescriptor() & noexcept +{ + return std::dynamic_pointer_cast(Layer::getDescriptor()); +} //! Retrieve the layer's descriptor. Note: this shadows BAG::Layer.getDescriptor() /*! @@ -70,9 +70,10 @@ T readAttribute( The layer's descriptor. Will never be nullptr. 
 */
-    std::shared_ptr<const VRRefinementsDescriptor> VRRefinements::getDescriptor() const & noexcept {
-        return std::dynamic_pointer_cast<const VRRefinementsDescriptor>(Layer::getDescriptor());
-    }
+std::shared_ptr<const VRRefinementsDescriptor> VRRefinements::getDescriptor() const & noexcept
+{
+    return std::dynamic_pointer_cast<const VRRefinementsDescriptor>(Layer::getDescriptor());
+}
 
 //! Constructor.
 /*!
@@ -151,6 +152,12 @@ std::unique_ptr<VRRefinements> VRRefinements::open(
         new ::H5::DataSet{h5file.openDataSet(VR_REFINEMENT_PATH)},
         DeleteH5dataSet{});
 
+    hsize_t dims[2];
+    int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr);
+    if (ndims != 2) {
+        throw InvalidVRRefinementDimensions{};
+    }
+    descriptor.setDims(dims[0], dims[1]);
     return std::unique_ptr<VRRefinements>(new VRRefinements{dataset,
         descriptor, std::move(h5dataSet)});
 }
 
@@ -237,18 +244,20 @@ UInt8Array VRRefinements::readProxy(
     const hsize_t columns = (columnEnd - columnStart) + 1;
     const hsize_t offset = columnStart;
 
-    const auto fileDataSpace = m_pH5dataSet->getSpace();
-    fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset);
+    const std::array<hsize_t, kRank> sizes{1, columns};
+    const std::array<hsize_t, kRank> offsets{0, offset};
 
-    const auto bufferSize = pDescriptor->getReadBufferSize(1,
-        static_cast<uint32_t>(columns));
-    UInt8Array buffer{bufferSize};
+    const auto h5fileDataSpace = m_pH5dataSet->getSpace();
+    h5fileDataSpace.selectHyperslab(H5S_SELECT_SET, sizes.data(), offsets.data());
 
-    const ::H5::DataSpace memDataSpace{1, &columns, &columns};
+    const auto bufferSize = pDescriptor->getReadBufferSize(1, columns);
+    UInt8Array buffer{bufferSize};
+
+    const ::H5::DataSpace memDataSpace{kRank, sizes.data(), sizes.data()};
 
     const auto memDataType = makeDataType();
 
-    m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, fileDataSpace);
+    m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, h5fileDataSpace);
 
     return buffer;
 }
 
@@ -306,6 +315,7 @@ void VRRefinements::writeProxy(
     const int numDims = fileDataSpace.getSimpleExtentDims(fileLength.data(),
         maxFileLength.data());
     if (numDims != 1)
+        // NOTE(review): removed stray debug std::cout here -- without braces it became the if-body and made the throw below unconditional. Also confirm this rank-1 check against open(), which now requires rank 2.
         throw InvalidVRRefinementDimensions{};
 
     if (fileLength[0] < (columnEnd + 1))
@@ -321,7 +331,14 @@ void VRRefinements::writeProxy(
             throw DatasetNotFound{};
 
         auto pDataset = this->getDataset().lock();
+        // TODO: Confirm that this is what we want --- this resets the dimensions of the
+        // overall BAG, rather than the layer, which means that it's going to set the
+        // metadata size of the mandatory layers to 1xN ... which is odd.
         pDataset->getDescriptor().setDims(1, static_cast<uint32_t>(newMaxLength));
+        // So that the read() call checks correctly against the size of the array, rather
+        // than the dimensions of the mandatory layer, we need to keep track of the size
+        // of the layer in the layer-specific descriptor.
+        pDescriptor->setDims(1, newMaxLength);
     }
 
     fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset);
diff --git a/api/bag_vrrefinementsdescriptor.cpp b/api/bag_vrrefinementsdescriptor.cpp
index 8a5630daca..e478e727ed 100644
--- a/api/bag_vrrefinementsdescriptor.cpp
+++ b/api/bag_vrrefinementsdescriptor.cpp
@@ -17,11 +17,13 @@ namespace BAG {
 */
 VRRefinementsDescriptor::VRRefinementsDescriptor(
     uint32_t id,
+    uint32_t rows, uint32_t cols,
     uint64_t chunkSize,
     int compressionLevel)
     : LayerDescriptor(id, VR_REFINEMENT_PATH,
-        kLayerTypeMapString.at(VarRes_Refinement), VarRes_Refinement, chunkSize,
-        compressionLevel)
+        kLayerTypeMapString.at(VarRes_Refinement), VarRes_Refinement,
+        rows, cols,
+        chunkSize, compressionLevel)
 {
 }
 
@@ -31,8 +33,8 @@ VRRefinementsDescriptor::VRRefinementsDescriptor(
     The BAG Dataset this layer belongs to.
*/ VRRefinementsDescriptor::VRRefinementsDescriptor( - const Dataset& dataset) - : LayerDescriptor(dataset, VarRes_Refinement, VR_REFINEMENT_PATH) + const Dataset& dataset, uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, VarRes_Refinement, rows, cols, VR_REFINEMENT_PATH) { } @@ -54,7 +56,7 @@ std::shared_ptr VRRefinementsDescriptor::create( int compressionLevel) { return std::shared_ptr( - new VRRefinementsDescriptor{dataset.getNextId(), chunkSize, + new VRRefinementsDescriptor{dataset.getNextId(), 1, 0, chunkSize, compressionLevel}); } @@ -67,10 +69,10 @@ std::shared_ptr VRRefinementsDescriptor::create( The existing variable resolution refinements descriptor. */ std::shared_ptr VRRefinementsDescriptor::open( - const Dataset& dataset) + const Dataset& dataset, uint32_t rows, uint32_t cols) { return std::shared_ptr( - new VRRefinementsDescriptor{dataset}); + new VRRefinementsDescriptor{dataset, rows, cols}); } diff --git a/api/bag_vrrefinementsdescriptor.h b/api/bag_vrrefinementsdescriptor.h index 9fa283adc9..0c7190aa93 100644 --- a/api/bag_vrrefinementsdescriptor.h +++ b/api/bag_vrrefinementsdescriptor.h @@ -38,14 +38,15 @@ class BAG_API VRRefinementsDescriptor final : public LayerDescriptor float maxUncertainty) & noexcept; protected: - VRRefinementsDescriptor(uint32_t id, uint64_t chunkSize, - int compressionLevel); - explicit VRRefinementsDescriptor(const Dataset& dataset); + VRRefinementsDescriptor(uint32_t id, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); + explicit VRRefinementsDescriptor(const Dataset& dataset, uint32_t rows, uint32_t cols); static std::shared_ptr create(const Dataset& dataset, uint64_t chunkSize, int compressionLevel); - static std::shared_ptr open(const Dataset& dataset); + static std::shared_ptr open(const Dataset& dataset, + uint32_t rows, uint32_t cols); private: DataType getDataTypeProxy() const noexcept override;