
Commit

Merge pull request #855 from PowerGridModel/feature/deserialized-api-tests

Feature/deserialized api tests
mgovers authored Dec 16, 2024
2 parents df078ff + 336e1af commit 8dede6b
Showing 12 changed files with 640 additions and 607 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -16,7 +16,7 @@ repos:
hooks:
- id: black-jupyter
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.11.2
rev: v1.13.0
hooks:
- id: mypy
additional_dependencies:
power_grid_model_cpp/dataset.hpp
@@ -11,7 +11,17 @@
#include "handle.hpp"

#include "power_grid_model_c/dataset.h"

namespace power_grid_model_cpp {
class ComponentTypeNotFound : public PowerGridError {
public:
ComponentTypeNotFound(std::string const& component)
: PowerGridError{[&]() {
using namespace std::string_literals;
return "ComponentType"s + component + " not found"s;
}()} {}
ComponentTypeNotFound(std::string_view component) : ComponentTypeNotFound{std::string{component}} {}
};

class DatasetInfo {

@@ -43,6 +53,16 @@ class DatasetInfo {
return handle_.call_with(PGM_dataset_info_total_elements, info_, component_idx);
}

Idx component_idx(std::string_view component) const {
Idx const n_comp = n_components();
for (Idx idx = 0; idx < n_comp; ++idx) {
if (component_name(idx) == component) {
return idx;
}
}
throw ComponentTypeNotFound{component};
}

private:
Handle handle_{};
RawDatasetInfo const* info_;
@@ -171,6 +191,16 @@ class DatasetConst {
detail::UniquePtr<RawConstDataset, &PGM_destroy_dataset_const> dataset_;
DatasetInfo info_;
};

struct OwningMemory {
std::vector<Buffer> buffers;
std::vector<std::vector<Idx>> indptrs;
};

struct OwningDataset {
DatasetMutable dataset;
OwningMemory storage{};
};
} // namespace power_grid_model_cpp

#endif // POWER_GRID_MODEL_CPP_DATASET_HPP
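
The new DatasetInfo::component_idx does a linear scan over the component names and throws ComponentTypeNotFound on a miss. A minimal usage sketch, not part of this diff: count_or_zero and the choice of component name are illustrative, and PowerGridError is assumed to expose the usual std::exception interface.

#include <power_grid_model_cpp/dataset.hpp>

#include <iostream>
#include <string_view>

using namespace power_grid_model_cpp;

// Return the total element count of a component, or 0 if the dataset info
// does not list that component type at all.
Idx count_or_zero(DatasetInfo const& info, std::string_view component) {
    try {
        return info.component_total_elements(info.component_idx(component));
    } catch (ComponentTypeNotFound const& e) {
        std::cerr << e.what() << '\n'; // the message names the missing component
        return 0;
    }
}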
@@ -40,8 +40,8 @@ class PowerGridBatchError : public PowerGridError {
std::string error_message;
};

PowerGridBatchError(std::string const& message, std::vector<FailedScenario> failed_scenarios_c)
: PowerGridError{message}, failed_scenarios_{std::move(failed_scenarios_c)} {}
PowerGridBatchError(std::string message, std::vector<FailedScenario> failed_scenarios_c)
: PowerGridError{std::move(message)}, failed_scenarios_{std::move(failed_scenarios_c)} {}
Idx error_code() const noexcept override { return PGM_batch_error; }
std::vector<FailedScenario> const& failed_scenarios() const { return failed_scenarios_; }

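The constructor above now takes message by value and moves it into the base class, the usual sink-parameter idiom: a caller passing an lvalue pays one copy, a caller passing an rvalue pays only moves. A generic sketch of the idiom, not project code:

#include <string>
#include <utility>

class Message {
  public:
    // By-value parameter acts as a sink: the caller's copy or temporary is moved in.
    explicit Message(std::string text) : text_{std::move(text)} {}
    std::string const& text() const noexcept { return text_; }

  private:
    std::string text_;
};
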
power_grid_model_cpp/serialization.hpp
@@ -9,6 +9,7 @@
#include "basics.hpp"
#include "dataset.hpp"
#include "handle.hpp"
#include "meta_data.hpp"

#include "power_grid_model_c/serialization.h"

@@ -90,6 +91,33 @@ class Serializer {
power_grid_model_cpp::Handle handle_{};
detail::UniquePtr<RawSerializer, &PGM_destroy_serializer> serializer_;
};

inline OwningDataset create_owning_dataset(DatasetWritable& writable_dataset) {
auto const& info = writable_dataset.get_info();
bool const is_batch = info.is_batch();
Idx const batch_size = info.batch_size();
auto const& dataset_name = info.name();
DatasetMutable dataset_mutable{dataset_name, is_batch, batch_size};
OwningMemory storage{};

for (Idx component_idx{}; component_idx < info.n_components(); ++component_idx) {
auto const& component_name = info.component_name(component_idx);
auto const& component_meta = MetaData::get_component_by_name(dataset_name, component_name);
Idx const component_size = info.component_total_elements(component_idx);
Idx const elements_per_scenario = info.component_elements_per_scenario(component_idx);

auto& current_indptr = storage.indptrs.emplace_back(elements_per_scenario < 0 ? batch_size + 1 : 0);
if (!current_indptr.empty()) {
current_indptr.at(0) = 0;
current_indptr.at(batch_size) = component_size;
}
Idx* const indptr = current_indptr.empty() ? nullptr : current_indptr.data();
auto& current_buffer = storage.buffers.emplace_back(component_meta, component_size);
writable_dataset.set_buffer(component_name, indptr, current_buffer);
dataset_mutable.add_buffer(component_name, elements_per_scenario, component_size, indptr, current_buffer);
}
return OwningDataset{.dataset = std::move(dataset_mutable), .storage = std::move(storage)};
}
} // namespace power_grid_model_cpp

#endif // POWER_GRID_MODEL_CPP_SERIALIZATION_HPP
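
create_owning_dataset returns the DatasetMutable together with the OwningMemory that backs its buffers, so the two stay alive as one unit. A sketch of wiring it to a Deserializer and a Model, under a few assumptions: input_json and result are placeholders supplied by the caller, Options is taken to be default-constructible with library defaults, and the Model and calculate calls follow their use in the validation tests further down.

#include <power_grid_model_cpp.hpp>

#include <string>

using namespace power_grid_model_cpp;

// result: a pre-built mutable output dataset with buffers for the expected
// components, e.g. what create_result_dataset in the validation tests produces.
void calculate_from_json(std::string const& input_json, DatasetMutable const& result) {
    Deserializer deserializer{input_json, PGM_json};
    OwningDataset const input = create_owning_dataset(deserializer.get_dataset());
    deserializer.parse_to_buffer(); // fills the buffers owned by input.storage

    Model model{50.0, input.dataset};
    Options const options{}; // assumed: default options are enough for a sketch
    model.calculate(options, result);
}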
2 changes: 2 additions & 0 deletions tests/cpp_unit_tests/test_math_solver_common.hpp
@@ -4,6 +4,8 @@

// In this unit test the powerflow solvers are tested

#pragma once

#include <power_grid_model/calculation_parameters.hpp>
#include <power_grid_model/common/exception.hpp>
#include <power_grid_model/common/three_phase_tensor.hpp>
2 changes: 2 additions & 0 deletions tests/cpp_unit_tests/test_math_solver_pf.hpp
@@ -4,6 +4,8 @@

// In this unit test the powerflow solvers are tested

#pragma once

#include "test_math_solver_common.hpp"

#include <power_grid_model/common/calculation_info.hpp>
2 changes: 2 additions & 0 deletions tests/cpp_unit_tests/test_math_solver_se.hpp
@@ -4,6 +4,8 @@

// In this unit test the powerflow solvers are tested

#pragma once

#include "test_math_solver_common.hpp"

#include <power_grid_model/common/calculation_info.hpp>
2 changes: 2 additions & 0 deletions tests/cpp_unit_tests/test_optimizer.hpp
@@ -2,6 +2,8 @@
//
// SPDX-License-Identifier: MPL-2.0

#pragma once

#include <power_grid_model/auxiliary/meta_data.hpp>
#include <power_grid_model/auxiliary/meta_gen/gen_getters.hpp>
#include <power_grid_model/container.hpp>
92 changes: 15 additions & 77 deletions tests/cpp_validation_tests/test_validation.cpp
@@ -4,7 +4,7 @@

#define PGM_ENABLE_EXPERIMENTAL

#include "power_grid_model_cpp.hpp"
#include <power_grid_model_cpp.hpp>

#include <doctest/doctest.h>
#include <nlohmann/json.hpp>
@@ -35,15 +35,6 @@ class UnsupportedValidationCase : public PowerGridError {
}()} {}
};

class OptionalNotInitialized : public PowerGridError {
public:
OptionalNotInitialized(std::string const& object)
: PowerGridError{[&]() {
using namespace std::string_literals;
return "Optional "s + object + " object not initialized"s;
}()} {}
};

using nlohmann::json;

auto read_file(std::filesystem::path const& path) {
@@ -60,72 +51,26 @@ auto read_json(std::filesystem::path const& path) {
return j;
}

struct OwningMemory {
std::vector<Buffer> buffers;
std::vector<std::vector<Idx>> indptrs;
};

struct OwningDataset {
std::optional<DatasetMutable> dataset;
std::optional<DatasetConst> const_dataset;
OwningMemory storage{};
};

OwningDataset create_owning_dataset(DatasetWritable& writable_dataset) {
auto const& info = writable_dataset.get_info();
bool const is_batch = info.is_batch();
Idx const batch_size = info.batch_size();
auto const& dataset_name = info.name();
OwningDataset owning_dataset{.dataset{DatasetMutable{dataset_name, is_batch, batch_size}},
.const_dataset = std::nullopt};

for (Idx component_idx{}; component_idx < info.n_components(); ++component_idx) {
auto const& component_name = info.component_name(component_idx);
auto const& component_meta = MetaData::get_component_by_name(dataset_name, component_name);
Idx const component_elements_per_scenario = info.component_elements_per_scenario(component_idx);
Idx const component_size = info.component_total_elements(component_idx);

auto& current_indptr = owning_dataset.storage.indptrs.emplace_back(
info.component_elements_per_scenario(component_idx) < 0 ? batch_size + 1 : 0);
if (!current_indptr.empty()) {
current_indptr.at(0) = 0;
current_indptr.at(batch_size) = component_size;
}
Idx* const indptr = current_indptr.empty() ? nullptr : current_indptr.data();
auto& current_buffer = owning_dataset.storage.buffers.emplace_back(component_meta, component_size);
writable_dataset.set_buffer(component_name, indptr, current_buffer);
owning_dataset.dataset.value().add_buffer(component_name, component_elements_per_scenario, component_size,
indptr, current_buffer);
}
owning_dataset.const_dataset = writable_dataset;
return owning_dataset;
}

OwningDataset create_result_dataset(OwningDataset const& input, std::string const& dataset_name, bool is_batch = false,
Idx batch_size = 1) {
OwningDataset owning_dataset{.dataset{DatasetMutable{dataset_name, is_batch, batch_size}},
.const_dataset = std::nullopt};
DatasetInfo const& input_info = input.dataset.get_info();

if (!input.const_dataset.has_value()) {
throw OptionalNotInitialized("DatasetConst");
}
DatasetInfo const& input_info = input.const_dataset.value().get_info();
OwningDataset result{.dataset = DatasetMutable{dataset_name, is_batch, batch_size}, .storage{}};

for (Idx component_idx{}; component_idx != input_info.n_components(); ++component_idx) {
auto const& component_name = input_info.component_name(component_idx);
auto const& component_meta = MetaData::get_component_by_name(dataset_name, component_name);
Idx const component_elements_per_scenario = input_info.component_elements_per_scenario(component_idx);
Idx const component_size = input_info.component_total_elements(component_idx);

auto& current_indptr = owning_dataset.storage.indptrs.emplace_back(
auto& current_indptr = result.storage.indptrs.emplace_back(
input_info.component_elements_per_scenario(component_idx) < 0 ? batch_size + 1 : 0);
Idx const* const indptr = current_indptr.empty() ? nullptr : current_indptr.data();
auto& current_buffer = owning_dataset.storage.buffers.emplace_back(component_meta, component_size);
owning_dataset.dataset.value().add_buffer(component_name, component_elements_per_scenario, component_size,
indptr, current_buffer);
auto& current_buffer = result.storage.buffers.emplace_back(component_meta, component_size);
result.dataset.add_buffer(component_name, component_elements_per_scenario, component_size, indptr,
current_buffer);
}
owning_dataset.const_dataset = owning_dataset.dataset.value();
return owning_dataset;
return result;
}

OwningDataset load_dataset(std::filesystem::path const& path) {
@@ -259,19 +204,13 @@ void assert_result(OwningDataset const& owning_result, OwningDataset const& owni
std::map<std::string, double, std::less<>> atol, double rtol) {
using namespace std::string_literals;

if (!owning_result.const_dataset.has_value()) {
throw OptionalNotInitialized("DatasetConst");
}
DatasetConst const& result = owning_result.const_dataset.value();
DatasetConst const result{owning_result.dataset};
auto const& result_info = result.get_info();
auto const& result_name = result_info.name();
Idx const result_batch_size = result_info.batch_size();
auto const& storage = owning_result.storage;

if (!owning_reference_result.const_dataset.has_value()) {
throw OptionalNotInitialized("DatasetConst");
}
DatasetConst const& reference_result = owning_reference_result.const_dataset.value();
DatasetConst const& reference_result = owning_reference_result.dataset;
auto const& reference_result_info = reference_result.get_info();
auto const& reference_result_name = reference_result_info.name();
auto const& reference_storage = owning_reference_result.storage;
@@ -574,8 +513,8 @@ void validate_single_case(CaseParam const& param) {

// create and run model
auto const& options = get_options(param);
Model model{50.0, validation_case.input.const_dataset.value()};
model.calculate(options, result.dataset.value());
Model model{50.0, validation_case.input.dataset};
model.calculate(options, result.dataset);

// check results
assert_result(result, validation_case.output.value(), param.atol, param.rtol);
@@ -586,21 +525,20 @@ void validate_batch_case(CaseParam const& param) {
execute_test(param, [&]() {
auto const output_prefix = get_output_type(param.calculation_type, param.sym);
auto const validation_case = create_validation_case(param, output_prefix);
auto const& info = validation_case.update_batch.value().const_dataset.value().get_info();
auto const& info = validation_case.update_batch.value().dataset.get_info();
Idx const batch_size = info.batch_size();
auto const batch_result =
create_result_dataset(validation_case.output_batch.value(), output_prefix, true, batch_size);

// create model
Model model{50.0, validation_case.input.const_dataset.value()};
Model model{50.0, validation_case.input.dataset};

// check results after whole update is finished
for (Idx const threading : {-1, 0, 1, 2}) {
CAPTURE(threading);
// set options and run
auto const& options = get_options(param, threading);
model.calculate(options, batch_result.dataset.value(),
validation_case.update_batch.value().const_dataset.value());
model.calculate(options, batch_result.dataset, validation_case.update_batch.value().dataset);

// check results
assert_result(batch_result, validation_case.output_batch.value(), param.atol, param.rtol);
17 changes: 17 additions & 0 deletions tests/native_api_tests/load_dataset.hpp
@@ -0,0 +1,17 @@
// SPDX-FileCopyrightText: Contributors to the Power Grid Model project <[email protected]>
//
// SPDX-License-Identifier: MPL-2.0

#pragma once

#include <power_grid_model_cpp/serialization.hpp>

namespace power_grid_model_cpp_test {
inline power_grid_model_cpp::OwningDataset load_dataset(std::string const& json_string) {
power_grid_model_cpp::Deserializer deserializer{json_string, PGM_json};
auto& writable_dataset = deserializer.get_dataset();
auto owning_dataset = power_grid_model_cpp::create_owning_dataset(writable_dataset);
deserializer.parse_to_buffer();
return owning_dataset;
}
} // namespace power_grid_model_cpp_test
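
A sketch of how a native API test might use this helper; the test case is hypothetical and the JSON literal is assumed to be a minimal, empty input document in the power-grid-model JSON serialization format.

#include "load_dataset.hpp"

#include <power_grid_model_cpp.hpp>

#include <doctest/doctest.h>

#include <string>

namespace power_grid_model_cpp_test {
TEST_CASE("Deserialized input can construct a model") {
    using namespace power_grid_model_cpp;
    // Assumed minimal, empty input document; a real test would embed or load actual data.
    std::string const json_input =
        R"({"version": "1.0", "type": "input", "is_batch": false, "attributes": {}, "data": {}})";

    OwningDataset const input = load_dataset(json_input);
    CHECK_FALSE(input.dataset.get_info().is_batch());
    CHECK_NOTHROW(Model(50.0, input.dataset));
}
} // namespace power_grid_model_cpp_test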