Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

WIP: 109 layer descriptor shape parameters #110

Draft
wants to merge 4 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions api/bag.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -824,7 +824,7 @@ BagError bagGetErrorString(
strncpy(str, "Metadata One or more elements of the requested coverage are missing from the XML file", MAX_STR-1);
break;
case BAG_METADTA_INVLID_DIMENSIONS:
sprintf(str, "Metadata The number of dimensions is incorrect (not equal to %d)", RANK);
snprintf(str, MAX_STR, "Metadata The number of dimensions is incorrect (not equal to %d)", RANK);
break;
case BAG_METADTA_BUFFER_EXCEEDED:
strncpy(str, "Metadata supplied buffer is too large to be stored in the internal array", MAX_STR-1);
Expand Down Expand Up @@ -866,7 +866,7 @@ BagError bagGetErrorString(
strncpy(str, "HDF Bag is not an HDF5 File", MAX_STR-1);
break;
case BAG_HDF_RANK_INCOMPATIBLE:
sprintf(str, "HDF Bag's rank is incompatible with expected Rank of the Datasets: %d", RANK);
snprintf(str, MAX_STR, "HDF Bag's rank is incompatible with expected Rank of the Datasets: %d", RANK);
break;
case BAG_HDF_TYPE_NOT_FOUND:
strncpy(str, "HDF Bag surface Datatype parameter not available", MAX_STR-1);
Expand Down
27 changes: 18 additions & 9 deletions api/bag_dataset.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -501,12 +501,12 @@ void Dataset::createDataset(

// Mandatory Layers
// Elevation
this->addLayer(SimpleLayer::create(*this, Elevation, chunkSize,
compressionLevel));
this->addLayer(SimpleLayer::create(*this, Elevation, m_pMetadata->rows(), m_pMetadata->columns(),
chunkSize, compressionLevel));

// Uncertainty
this->addLayer(SimpleLayer::create(*this, Uncertainty, chunkSize,
compressionLevel));
this->addLayer(SimpleLayer::create(*this, Uncertainty, m_pMetadata->rows(), m_pMetadata->columns(),
chunkSize, compressionLevel));
}

//! Create an optional simple layer.
Expand Down Expand Up @@ -545,8 +545,8 @@ Layer& Dataset::createSimpleLayer(
case Num_Soundings: //[[fallthrough]];
case Average_Elevation: //[[fallthrough]];
case Nominal_Elevation:
return this->addLayer(SimpleLayer::create(*this, type, chunkSize,
compressionLevel));
return this->addLayer(SimpleLayer::create(*this, type,
m_pMetadata->rows(), m_pMetadata->columns(), chunkSize, compressionLevel));
case Surface_Correction: //[[fallthrough]];
case Georef_Metadata: //[[fallthrough]];
default:
Expand Down Expand Up @@ -1096,7 +1096,10 @@ void Dataset::readDataset(

H5Dclose(id);

auto layerDesc = SimpleLayerDescriptor::open(*this, layerType);
// Pre-stage the layer-specific descriptor. Note that we don't need to specify the
// dimensions of the layer here, since they're set from the HDF5 dataset when it
// gets opened with SimpleLayer::open().
auto layerDesc = SimpleLayerDescriptor::open(*this, layerType, 0, 0);
this->addLayer(SimpleLayer::open(*this, *layerDesc));
}

Expand Down Expand Up @@ -1159,7 +1162,10 @@ void Dataset::readDataset(
}

{
auto descriptor = VRRefinementsDescriptor::open(*this);
// Pre-stage the layer-specific descriptor for the refinements; note that this
// doesn't have to have specific dimensions since they're set when the refinements
// layer is read in VRRefinements::open().
auto descriptor = VRRefinementsDescriptor::open(*this, 0, 0);
this->addLayer(VRRefinements::open(*this, *descriptor));
}

Expand All @@ -1169,7 +1175,10 @@ void Dataset::readDataset(
{
H5Dclose(id);

auto descriptor = VRNodeDescriptor::open(*this);
// Pre-stage the layer-specific descriptor for the nodes; note that this doesn't
// have to have specific dimensions since they're set when the nodes layer is
// read in VRNode::open().
auto descriptor = VRNodeDescriptor::open(*this, 0, 0);
this->addLayer(VRNode::open(*this, *descriptor));
}
}
Expand Down
24 changes: 20 additions & 4 deletions api/bag_georefmetadatalayer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -104,8 +104,13 @@ std::shared_ptr<GeorefMetadataLayer> GeorefMetadataLayer::create(
keyType != DT_UINT64)
throw InvalidKeyType{};

// The keys array should be the same dimensions as the mandatory elevation layer, so read
// from the file global descriptor, and set.
uint32_t rows = 0, cols = 0;
std::tie<uint32_t, uint32_t>(rows, cols) = dataset.getDescriptor().getDims();
auto pDescriptor = GeorefMetadataLayerDescriptor::create(dataset, name, profile, keyType,
definition, chunkSize, compressionLevel);
definition, rows, cols,
chunkSize, compressionLevel);

// Create the H5 Group to hold keys & values.
const auto& h5file = dataset.getH5file();
Expand All @@ -122,7 +127,8 @@ std::shared_ptr<GeorefMetadataLayer> GeorefMetadataLayer::create(
auto h5valueDataSet = GeorefMetadataLayer::createH5valueDataSet(dataset, *pDescriptor);

auto layer = std::make_shared<GeorefMetadataLayer>(dataset,
*pDescriptor, std::move(h5keyDataSet), std::move(h5vrKeyDataSet),
*pDescriptor, std::move(h5keyDataSet),
std::move(h5vrKeyDataSet),
std::move(h5valueDataSet));

layer->setValueTable(std::unique_ptr<ValueTable>(new ValueTable{*layer}));
Expand Down Expand Up @@ -150,6 +156,12 @@ std::shared_ptr<GeorefMetadataLayer> GeorefMetadataLayer::open(
new ::H5::DataSet{h5file.openDataSet(internalPath + COMPOUND_KEYS)},
DeleteH5dataSet{});

// The keys array has the dimensions of the layer, so we can read and reset the
// descriptor dimensions, in case they were inconsistent (or not set).
std::array<hsize_t, kRank> dims;
h5keyDataSet->getSpace().getSimpleExtentDims(dims.data(), nullptr);
descriptor.setDims(dims[0], dims[1]);

std::unique_ptr<::H5::DataSet, DeleteH5dataSet> h5vrKeyDataSet{};
if (dataset.getVRMetadata())
h5vrKeyDataSet = std::unique_ptr<::H5::DataSet, DeleteH5dataSet>(
Expand All @@ -161,7 +173,9 @@ std::shared_ptr<GeorefMetadataLayer> GeorefMetadataLayer::open(
DeleteH5dataSet{});

auto layer = std::make_shared<GeorefMetadataLayer>(dataset,
descriptor, std::move(h5keyDataSet), std::move(h5vrKeyDataSet),
descriptor,
std::move(h5keyDataSet),
std::move(h5vrKeyDataSet),
std::move(h5valueDataSet));

layer->setValueTable(std::unique_ptr<ValueTable>(new ValueTable{*layer}));
Expand All @@ -188,7 +202,9 @@ GeorefMetadataLayer::createH5keyDataSet(
std::unique_ptr<::H5::DataSet, DeleteH5dataSet> pH5dataSet;

{
// Use the dimensions from the descriptor.
// Use the dimensions from the descriptor. We could do this from the specific
// descriptor for the layer, too, which should mirror the size of the file global
// descriptor used here.
uint32_t dim0 = 0, dim1 = 0;
std::tie(dim0, dim1) = dataset.getDescriptor().getDims();
const std::array<hsize_t, kRank> fileDims{dim0, dim1};
Expand Down
4 changes: 2 additions & 2 deletions api/bag_georefmetadatalayer.h
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,8 @@ class BAG_API GeorefMetadataLayer final : public Layer
protected:
static std::shared_ptr<GeorefMetadataLayer> create(DataType keyType,
const std::string& name, GeorefMetadataProfile profile, Dataset& dataset,
const RecordDefinition& definition, uint64_t chunkSize,
int compressionLevel);
const RecordDefinition& definition,
uint64_t chunkSize, int compressionLevel);
static std::shared_ptr<GeorefMetadataLayer> open(Dataset& dataset,
GeorefMetadataLayerDescriptor& descriptor);

Expand Down
12 changes: 10 additions & 2 deletions api/bag_georefmetadatalayerdescriptor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,11 @@ GeorefMetadataLayerDescriptor::GeorefMetadataLayerDescriptor(
GeorefMetadataProfile profile,
DataType keyType,
RecordDefinition definition,
uint32_t rows, uint32_t cols,
uint64_t chunkSize,
int compressionLevel)
: LayerDescriptor(dataset.getNextId(), GEOREF_METADATA_PATH + name, name,
Georef_Metadata, chunkSize, compressionLevel)
Georef_Metadata, rows, cols, chunkSize, compressionLevel)
, m_pBagDataset(dataset.shared_from_this())
, m_profile(profile)
, m_keyType(keyType)
Expand Down Expand Up @@ -72,12 +73,14 @@ std::shared_ptr<GeorefMetadataLayerDescriptor> GeorefMetadataLayerDescriptor::cr
GeorefMetadataProfile profile,
DataType keyType,
RecordDefinition definition,
uint32_t rows, uint32_t cols,
uint64_t chunkSize,
int compressionLevel)
{
return std::shared_ptr<GeorefMetadataLayerDescriptor>(
new GeorefMetadataLayerDescriptor{dataset, name, profile, keyType,
std::move(definition), chunkSize, compressionLevel});
std::move(definition), rows, cols,
chunkSize, compressionLevel});
}

//! Open an existing georeferenced metadata layer descriptor.
Expand Down Expand Up @@ -165,8 +168,13 @@ std::shared_ptr<GeorefMetadataLayerDescriptor> GeorefMetadataLayerDescriptor::op
profile = UNKNOWN_METADATA_PROFILE;
}

std::array<hsize_t, 2> dims;
h5dataSet.getSpace().getSimpleExtentDims(dims.data(), nullptr);

return std::shared_ptr<GeorefMetadataLayerDescriptor>(
new GeorefMetadataLayerDescriptor{dataset, name, profile, keyType, definition,
static_cast<const uint32_t>(dims[0]),
static_cast<const uint32_t>(dims[1]),
chunkSize, compressionLevel});
}

Expand Down
16 changes: 9 additions & 7 deletions api/bag_georefmetadatalayerdescriptor.h
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,13 @@ namespace BAG {
class BAG_API GeorefMetadataLayerDescriptor final : public LayerDescriptor
{
public:
static std::shared_ptr<GeorefMetadataLayerDescriptor> create(Dataset& dataset,
const std::string& name, GeorefMetadataProfile profile, DataType keyType,
RecordDefinition definition, uint64_t chunkSize,
int compressionLevel);
static std::shared_ptr<GeorefMetadataLayerDescriptor> open(Dataset& dataset,
const std::string& name);
static std::shared_ptr<GeorefMetadataLayerDescriptor>
create(Dataset& dataset,
const std::string& name, GeorefMetadataProfile profile, DataType keyType,
RecordDefinition definition, uint32_t rows, uint32_t cols,
uint64_t chunkSize, int compressionLevel);
static std::shared_ptr<GeorefMetadataLayerDescriptor>
open(Dataset& dataset, const std::string& name);

GeorefMetadataLayerDescriptor(const GeorefMetadataLayerDescriptor&) = delete;
GeorefMetadataLayerDescriptor(GeorefMetadataLayerDescriptor&&) = delete;
Expand All @@ -52,7 +53,8 @@ class BAG_API GeorefMetadataLayerDescriptor final : public LayerDescriptor

protected:
GeorefMetadataLayerDescriptor(Dataset& dataset, const std::string& name, GeorefMetadataProfile profile,
DataType keyType, RecordDefinition definition, uint64_t chunkSize,
DataType keyType, RecordDefinition definition,
uint32_t rows, uint32_t cols, uint64_t chunkSize,
int compressionLevel);

private:
Expand Down
4 changes: 4 additions & 0 deletions api/bag_interleavedlegacylayer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,10 @@ std::shared_ptr<InterleavedLegacyLayer> InterleavedLegacyLayer::open(
descriptor.setMinMax(std::get<1>(possibleMinMax),
std::get<2>(possibleMinMax));

std::array<hsize_t, 2> dims;
h5dataSet->getSpace().getSimpleExtentDims(dims.data(), nullptr);
descriptor.setDims(dims[0], dims[1]);

return std::make_shared<InterleavedLegacyLayer>(dataset,
descriptor, std::move(h5dataSet));
}
Expand Down
17 changes: 12 additions & 5 deletions api/bag_interleavedlegacylayerdescriptor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,10 @@ namespace BAG {
InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor(
uint32_t id,
LayerType layerType,
GroupType groupType)
GroupType groupType,
uint32_t rows, uint32_t cols)
: LayerDescriptor(id, Layer::getInternalPath(layerType),
kLayerTypeMapString.at(layerType), layerType, 0, 0)
kLayerTypeMapString.at(layerType), layerType, rows, cols, 0, 0)
, m_groupType(groupType)
, m_elementSize(Layer::getElementSize(Layer::getDataType(layerType)))
{
Expand All @@ -45,8 +46,10 @@ InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor(
InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor(
const Dataset& dataset,
LayerType layerType,
GroupType groupType)
GroupType groupType,
uint32_t rows, uint32_t cols)
: LayerDescriptor(dataset, layerType,
rows, cols,
groupType == NODE
? NODE_GROUP_PATH
: groupType == ELEVATION
Expand Down Expand Up @@ -76,9 +79,11 @@ std::shared_ptr<InterleavedLegacyLayerDescriptor> InterleavedLegacyLayerDescript
LayerType layerType,
GroupType groupType)
{
uint32_t rows, cols;
std::tie(rows, cols) = dataset.getDescriptor().getDims();
return std::shared_ptr<InterleavedLegacyLayerDescriptor>(
new InterleavedLegacyLayerDescriptor{dataset.getNextId(), layerType,
groupType});
groupType, rows, cols});
}

//! Open an interleaved layer descriptor.
Expand All @@ -99,8 +104,10 @@ std::shared_ptr<InterleavedLegacyLayerDescriptor> InterleavedLegacyLayerDescript
LayerType layerType,
GroupType groupType)
{
uint32_t rows, cols;
std::tie<uint32_t, uint32_t>(rows, cols) = dataset.getDescriptor().getDims();
return std::shared_ptr<InterleavedLegacyLayerDescriptor>(
new InterleavedLegacyLayerDescriptor{dataset, layerType, groupType});
new InterleavedLegacyLayerDescriptor{dataset, layerType, groupType, rows, cols});
}


Expand Down
4 changes: 2 additions & 2 deletions api/bag_interleavedlegacylayerdescriptor.h
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,9 @@ class BAG_API InterleavedLegacyLayerDescriptor final : public LayerDescriptor

protected:
InterleavedLegacyLayerDescriptor(uint32_t id, LayerType layerType,
GroupType groupType);
GroupType groupType, uint32_t rows, uint32_t cols);
InterleavedLegacyLayerDescriptor(const Dataset& dataset, LayerType layerType,
GroupType groupType);
GroupType groupType, uint32_t rows, uint32_t cols);

private:
static void validateTypes(LayerType layerType, GroupType groupType);
Expand Down
3 changes: 1 addition & 2 deletions api/bag_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -208,9 +208,8 @@ UInt8Array Layer::read(
if (m_pBagDataset.expired())
throw DatasetNotFound{};

const auto pDataset = m_pBagDataset.lock();
uint32_t numRows = 0, numColumns = 0;
std::tie(numRows, numColumns) = pDataset->getDescriptor().getDims();
std::tie(numRows, numColumns) = m_pLayerDescriptor->getDims();

if (columnEnd >= numColumns || rowEnd >= numRows)
throw InvalidReadSize{};
Expand Down
20 changes: 20 additions & 0 deletions api/bag_layerdescriptor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ LayerDescriptor::LayerDescriptor(
std::string internalPath,
std::string name,
LayerType type,
uint32_t rows, uint32_t cols,
uint64_t chunkSize,
int compressionLevel)
: m_id(id)
Expand All @@ -39,6 +40,7 @@ LayerDescriptor::LayerDescriptor(
, m_compressionLevel(compressionLevel)
, m_chunkSize(chunkSize)
, m_minMax(std::numeric_limits<float>::max(), std::numeric_limits<float>::lowest())
, m_dims({rows, cols})
{
}

Expand All @@ -56,11 +58,13 @@ LayerDescriptor::LayerDescriptor(
LayerDescriptor::LayerDescriptor(
const Dataset& dataset,
LayerType type,
uint32_t rows, uint32_t cols,
std::string internalPath,
std::string name)
: m_id(dataset.getNextId())
, m_layerType(type)
, m_minMax(std::numeric_limits<float>::max(), std::numeric_limits<float>::lowest())
, m_dims({rows, cols})
{
m_internalPath = internalPath.empty()
? Layer::getInternalPath(type)
Expand Down Expand Up @@ -169,6 +173,16 @@ const std::string& LayerDescriptor::getName() const & noexcept
return m_name;
}

//! Retrieve the dimensions (shape) of the layer.
/*!
\return
    The number of rows and columns of the layer's grid (its shape),
    not the spatial spacing/resolution of the cells.
*/
const std::tuple<uint32_t, uint32_t>& LayerDescriptor::getDims() const & noexcept
{
    return m_dims;
}

//! Get the size of a buffer for reading a specified number rows and columns.
/*!
\param rows
Expand Down Expand Up @@ -204,6 +218,12 @@ LayerDescriptor& LayerDescriptor::setMinMax(
return *this;
}

//! Set the dimensions (shape) of the layer.
/*!
\param rows
    The number of rows in the layer's grid.
\param cols
    The number of columns in the layer's grid.

\return
    The layer descriptor, to allow call chaining.
*/
LayerDescriptor& LayerDescriptor::setDims(uint32_t rows, uint32_t cols) & noexcept
{
    m_dims = {rows, cols};
    return *this;
}

//! Set the HDF5 path of the layer.
/*!
\param inPath
Expand Down
8 changes: 7 additions & 1 deletion api/bag_layerdescriptor.h
Original file line number Diff line number Diff line change
Expand Up @@ -53,14 +53,18 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this<LayerDescrip
LayerType getLayerType() const noexcept;
std::tuple<float, float> getMinMax() const noexcept;
const std::string& getName() const & noexcept;
const std::tuple<uint32_t, uint32_t>& getDims() const & noexcept;

LayerDescriptor& setName(std::string inName) & noexcept;
LayerDescriptor& setMinMax(float min, float max) & noexcept;
LayerDescriptor& setDims(uint32_t rows, uint32_t cols) & noexcept;

protected:
LayerDescriptor(uint32_t id, std::string internalPath, std::string name,
LayerType type, uint64_t chunkSize, int compressionLevel);
LayerType type, uint32_t rows, uint32_t cols, uint64_t chunkSize,
int compressionLevel);
LayerDescriptor(const Dataset& dataset, LayerType type,
uint32_t rows, uint32_t cols,
std::string internalPath = {}, std::string name = {});

size_t getReadBufferSize(uint32_t rows, uint32_t columns) const noexcept;
Expand All @@ -85,6 +89,8 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this<LayerDescrip
uint64_t m_chunkSize = 0;
//! The minimum and maximum value of this dataset.
std::tuple<float, float> m_minMax{};
//! The dimensions of the layer
std::tuple<uint32_t, uint32_t> m_dims{};

friend GeorefMetadataLayer;
friend InterleavedLegacyLayer;
Expand Down
Loading
Loading