Add Complex Number Support #233

Merged 4 commits on Oct 28, 2024
12 changes: 12 additions & 0 deletions include/z5/filesystem/factory.hxx
@@ -42,6 +42,12 @@ namespace filesystem {
ptr.reset(new Dataset<float>(dataset, metadata)); break;
case types::float64:
ptr.reset(new Dataset<double>(dataset, metadata)); break;
case types::complex64:
ptr.reset(new Dataset<std::complex<float>>(dataset, metadata)); break;
case types::complex128:
ptr.reset(new Dataset<std::complex<double>>(dataset, metadata)); break;
case types::complex256:
ptr.reset(new Dataset<std::complex<long double>>(dataset, metadata)); break;
}
return ptr;
}
@@ -78,6 +84,12 @@ namespace filesystem {
ptr.reset(new Dataset<float>(dataset, metadata)); break;
case types::float64:
ptr.reset(new Dataset<double>(dataset, metadata)); break;
case types::complex64:
ptr.reset(new Dataset<std::complex<float>>(dataset, metadata)); break;
case types::complex128:
ptr.reset(new Dataset<std::complex<double>>(dataset, metadata)); break;
case types::complex256:
ptr.reset(new Dataset<std::complex<long double>>(dataset, metadata)); break;
}
return ptr;
}
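(Not part of the diff.) For context, these new switch cases are what let the factory hand back a correctly typed Dataset when the stored zarr dtype is a complex typestring. A minimal usage sketch, mirroring the functions and handle types exercised in this PR's tests; file and dataset names are placeholders and the z5 includes are omitted (see src/test/test_dataset.cxx for the headers the tests use):

// (Assumes the surrounding code sits in namespace z5, as in the tests, and that
//  "example.zr" already contains a dataset "complexData" stored with dtype "<c8".)
filesystem::handle::File file("example.zr");
auto ds = openDataset(file, "complexData");  // resolved to Dataset<std::complex<float>> by the switch above

// Read the first chunk into a complex buffer (chunk shape assumed to be 10x10x10).
std::vector<std::complex<float>> chunk(10 * 10 * 10);
ds->readChunk(types::ShapeType({0, 0, 0}), chunk.data());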
2 changes: 2 additions & 0 deletions include/z5/metadata.hxx
@@ -143,6 +143,8 @@ namespace z5 {
} else {
throw std::runtime_error("Invalid string value for fillValue");
}
} else if(fillValJson.type() == nlohmann::json::value_t::null) {
fillValue = std::numeric_limits<double>::quiet_NaN();
} else {
fillValue = static_cast<double>(fillValJson);
}
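(Not part of the diff.) The new null branch matters because zarr metadata commonly stores "fill_value": null when no fill value is specified; without it, the final static_cast<double> would throw a type error on a null JSON value. A small standalone sketch of the same nlohmann::json logic (the helper name parseFillValue is illustrative only):

#include <limits>
#include "nlohmann/json.hpp"

// Illustrative helper mirroring the branch added above.
double parseFillValue(const nlohmann::json & fillValJson) {
    if(fillValJson.type() == nlohmann::json::value_t::null) {
        // "fill_value": null -> represented as quiet NaN
        return std::numeric_limits<double>::quiet_NaN();
    }
    return static_cast<double>(fillValJson);
}

// parseFillValue(nlohmann::json::parse("null")) -> NaN
// parseFillValue(nlohmann::json::parse("42"))   -> 42.0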
10 changes: 7 additions & 3 deletions include/z5/types/types.hxx
@@ -5,6 +5,7 @@
#include <string>
#include <map>
#include <variant>
#include <complex>

#include "nlohmann/json.hpp"

@@ -28,7 +29,8 @@ namespace types {
enum Datatype {
int8, int16, int32, int64,
uint8, uint16, uint32, uint64,
float32, float64
float32, float64,
complex64, complex128, complex256
};

struct Datatypes {
@@ -38,15 +40,17 @@ namespace types {
static DtypeMap & zarrToDtype() {
static DtypeMap dtypeMap({{{"|i1", int8}, {"<i2", int16}, {"<i4", int32}, {"<i8", int64},
{"|u1", uint8}, {"<u2", uint16}, {"<u4", uint32}, {"<u8", uint64},
{"<f4", float32}, {"<f8", float64}}});
{"<f4", float32}, {"<f8", float64},
{"<c8", complex64}, {"<c16", complex128}, {"<c32", complex256}}});
return dtypeMap;
}

static InverseDtypeMap & dtypeToZarr() {

static InverseDtypeMap dtypeMap({{{int8 , "|i1"}, {int16, "<i2"}, {int32, "<i4"}, {int64, "<i8"},
{uint8 , "|u1"}, {uint16, "<u2"}, {uint32, "<u4"},{uint64,"<u8"},
{float32, "<f4"}, {float64,"<f8"}}});
{float32, "<f4"}, {float64,"<f8"},
{complex64, "<c8"}, {complex128, "<c16"}, {complex256, "<c32"}}});
return dtypeMap;
}

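(Not part of the diff.) The new typestrings follow the numpy/zarr convention: "<c8" is a little-endian complex of two 4-byte floats (complex64), "<c16" two doubles (complex128), and "<c32" two long doubles (complex256, which matches numpy's complex256 only on platforms where a long double occupies 16 bytes, e.g. typical Linux x86-64). A sketch of a round trip through the two maps above, assuming DtypeMap behaves like a std::map keyed by the typestring and that the code sits in namespace z5:

#include <cassert>

void complexDtypeRoundTrip() {
    // zarr typestring -> Datatype enum
    auto & toDtype = types::Datatypes::zarrToDtype();
    // Datatype enum -> zarr typestring
    auto & toZarr = types::Datatypes::dtypeToZarr();

    const auto dt = toDtype.at("<c8");   // types::complex64
    assert(toZarr.at(dt) == "<c8");
}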
16 changes: 16 additions & 0 deletions src/python/lib/dataset.cxx
@@ -228,6 +228,10 @@ namespace z5 {
// float types
exportIoT<float>(module, "float32");
exportIoT<double>(module, "float64");
// complex types
exportIoT<std::complex<float>>(module, "complex64");
exportIoT<std::complex<double>>(module, "complex128");
exportIoT<std::complex<long double>>(module, "complex256");

// export writing scalars
// The overloads cannot be properly resolved,
@@ -281,6 +285,18 @@ namespace z5 {
static_cast<double>(val),
numberOfThreads);
break;
case types::Datatype::complex64 : writePyScalar<std::complex<float>>(ds, roiBegin, roiShape,
static_cast<std::complex<float>>(val),
numberOfThreads);
break;
case types::Datatype::complex128 : writePyScalar<std::complex<double>>(ds, roiBegin, roiShape,
static_cast<std::complex<double>>(val),
numberOfThreads);
break;
case types::Datatype::complex256 : writePyScalar<std::complex<long double>>(ds, roiBegin, roiShape,
static_cast<std::complex<long double>>(val),
numberOfThreads);
break;
default: throw(std::runtime_error("Invalid datatype"));

}
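(Not part of the diff.) In the new scalar branches, static_cast<std::complex<T>>(val) converts a real scalar into a complex value with zero imaginary part, so filling a complex dataset with the Python scalar 42 stores 42+0j. The exact C++ type of val in the binding is not shown here; the illustration below just uses a double:

#include <cassert>
#include <complex>

void realScalarToComplex() {
    const double val = 42.0;
    const auto c = static_cast<std::complex<float>>(val);
    assert(c.real() == 42.0f);
    assert(c.imag() == 0.0f);   // imaginary part defaults to zero
}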
101 changes: 93 additions & 8 deletions src/test/test_dataset.cxx
@@ -13,10 +13,11 @@ namespace z5 {
class DatasetTest : public ::testing::Test {

protected:
DatasetTest() : fileHandle_("data.zr"), floatHandle_(fileHandle_, "float"), intHandle_(fileHandle_, "int") {
DatasetTest() : fileHandle_("data.zr"), intHandle_(fileHandle_, "int"), floatHandle_(fileHandle_, "float"), complexFloatHandle_(fileHandle_, "complexFloat") {
// int zarray metadata
jInt_ = "{ \"chunks\": [10, 10, 10], \"compressor\": { \"clevel\": 5, \"cname\": \"lz4\", \"id\": \"blosc\", \"shuffle\": 1}, \"dtype\": \"<i4\", \"fill_value\": 42, \"filters\": null, \"order\": \"C\", \"shape\": [100, 100, 100], \"zarr_format\": 2}"_json;
jFloat_ = "{ \"chunks\": [10, 10, 10], \"compressor\": { \"clevel\": 5, \"cname\": \"lz4\", \"id\": \"blosc\", \"shuffle\": 1}, \"dtype\": \"<i4\", \"fill_value\": 42, \"filters\": null, \"order\": \"C\", \"shape\": [100, 100, 100], \"zarr_format\": 2}"_json;
jFloat_ = "{ \"chunks\": [10, 10, 10], \"compressor\": { \"clevel\": 5, \"cname\": \"lz4\", \"id\": \"blosc\", \"shuffle\": 1}, \"dtype\": \"<f4\", \"fill_value\": 42, \"filters\": null, \"order\": \"C\", \"shape\": [100, 100, 100], \"zarr_format\": 2}"_json;
jComplexFloat_ = "{ \"chunks\": [10, 10, 10], \"compressor\": { \"clevel\": 5, \"cname\": \"lz4\", \"id\": \"blosc\", \"shuffle\": 1}, \"dtype\": \"<c8\", \"fill_value\": 42, \"filters\": null, \"order\": \"C\", \"shape\": [100, 100, 100], \"zarr_format\": 2}"_json;

}

@@ -44,20 +45,31 @@ namespace z5 {
dataFloat_[i] = drawFloat();
}

// fill 'dataComplexFloat_' with random values
for(std::size_t i = 0; i < size_; ++i) {
dataComplexFloat_[i].real(drawFloat());
dataComplexFloat_[i].imag(drawFloat());
}

//
// create files for reading
//
fileHandle_.create();
floatHandle_.create();
intHandle_.create();

DatasetMetadata floatMeta;
floatMeta.fromJson(jFloat_, true);
filesystem::writeMetadata(floatHandle_, floatMeta);
floatHandle_.create();
complexFloatHandle_.create();

DatasetMetadata intMeta;
intMeta.fromJson(jInt_, true);
filesystem::writeMetadata(intHandle_, intMeta);

DatasetMetadata floatMeta;
floatMeta.fromJson(jFloat_, true);
filesystem::writeMetadata(floatHandle_, floatMeta);

DatasetMetadata complexFloatMeta;
complexFloatMeta.fromJson(jComplexFloat_, true);
filesystem::writeMetadata(complexFloatHandle_, complexFloatMeta);
}

virtual void TearDown() {
@@ -66,15 +78,18 @@ namespace z5 {
}

filesystem::handle::File fileHandle_;
filesystem::handle::Dataset floatHandle_;
filesystem::handle::Dataset intHandle_;
filesystem::handle::Dataset floatHandle_;
filesystem::handle::Dataset complexFloatHandle_;

nlohmann::json jInt_;
nlohmann::json jFloat_;
nlohmann::json jComplexFloat_;

static const std::size_t size_ = 10*10*10;
int dataInt_[size_];
float dataFloat_[size_];
std::complex<float> dataComplexFloat_[size_];

};

@@ -222,6 +237,76 @@ namespace z5 {
}
}

TEST_F(DatasetTest, OpenComplexFloatDataset) {

auto ds = openDataset(fileHandle_, "complexFloat");
const auto & chunksPerDim = ds->chunksPerDimension();

std::default_random_engine generator;

// test uninitialized chunk -> this is expected to throw a runtime error
std::complex<float> dataTmp[size_];
ASSERT_THROW(ds->readChunk(types::ShapeType({0, 0, 0}), dataTmp), std::runtime_error);

// test for 10 random chunks
for(unsigned t = 0; t < 10; ++t) {

// get a random chunk
types::ShapeType chunkId(ds->dimension());
for(unsigned d = 0; d < ds->dimension(); ++d) {
std::uniform_int_distribution<std::size_t> distr(0, chunksPerDim[d] - 1);
chunkId[d] = distr(generator);
}

ds->writeChunk(chunkId, dataComplexFloat_);

// read a chunk
std::complex<float> dataTmp[size_];
ds->readChunk(chunkId, dataTmp);

// check
for(std::size_t i = 0; i < size_; ++i) {
ASSERT_EQ(dataTmp[i], dataComplexFloat_[i]);
}
}
}

TEST_F(DatasetTest, CreateComplexFloatDataset) {

DatasetMetadata complexFloatMeta;
complexFloatMeta.fromJson(jComplexFloat_, true);

auto ds = createDataset(fileHandle_, "complexFloat1", complexFloatMeta);
const auto & chunksPerDim = ds->chunksPerDimension();

std::default_random_engine generator;

// test uninitialized chunk -> this is expected to throw a runtime error
std::complex<float> dataTmp[size_];
ASSERT_THROW(ds->readChunk(types::ShapeType({0, 0, 0}), dataTmp), std::runtime_error);

// test for 10 random chunks
for(unsigned t = 0; t < 10; ++t) {

// get a random chunk
types::ShapeType chunkId(ds->dimension());
for(unsigned d = 0; d < ds->dimension(); ++d) {
std::uniform_int_distribution<std::size_t> distr(0, chunksPerDim[d] - 1);
chunkId[d] = distr(generator);
}

ds->writeChunk(chunkId, dataComplexFloat_);

// read a chunk
std::complex<float> dataTmp[size_];
ds->readChunk(chunkId, dataTmp);

// check
for(std::size_t i = 0; i < size_; ++i) {
ASSERT_EQ(dataTmp[i], dataComplexFloat_[i]);
}
}
}

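(Not part of the diff.) Taken together, the two tests above exercise the full round trip for complex data through the public API. A condensed standalone sketch of the same workflow; everything below is assumed to live in namespace z5 as in the test file, the z5 includes are omitted, and the "example.zr" path and dataset name are placeholders:

// Zarr metadata for a complex64 array, mirroring jComplexFloat_ above.
nlohmann::json j = nlohmann::json::parse(R"({
    "chunks": [10, 10, 10],
    "compressor": {"clevel": 5, "cname": "lz4", "id": "blosc", "shuffle": 1},
    "dtype": "<c8", "fill_value": 42, "filters": null,
    "order": "C", "shape": [100, 100, 100], "zarr_format": 2})");

filesystem::handle::File file("example.zr");
file.create();

DatasetMetadata meta;
meta.fromJson(j, true);
auto ds = createDataset(file, "complexData", meta);

std::vector<std::complex<float>> in(10 * 10 * 10, std::complex<float>(1.f, -2.f));
std::vector<std::complex<float>> out(in.size());
ds->writeChunk(types::ShapeType({0, 0, 0}), in.data());
ds->readChunk(types::ShapeType({0, 0, 0}), out.data());
// out now equals in, element for element.
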
TEST_F(DatasetTest, CreateBloscDataset) {
