Skip to content
Merged
Show file tree
Hide file tree
Changes from 23 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
147 changes: 42 additions & 105 deletions modules/core/perf/func_tests/perf_tests.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -12,166 +12,103 @@
TEST(perf_tests, check_perf_pipeline) {
// Create data
std::vector<uint32_t> in(2000, 1);
std::vector<uint32_t> out(1, 0);

// Create task_data
auto task_data = std::make_shared<ppc::core::TaskData>();
task_data->inputs.emplace_back(reinterpret_cast<uint8_t *>(in.data()));
task_data->inputs_count.emplace_back(in.size());
task_data->outputs.emplace_back(reinterpret_cast<uint8_t *>(out.data()));
task_data->outputs_count.emplace_back(out.size());

// Create Task
auto test_task = std::make_shared<ppc::test::perf::TestTask<uint32_t>>(task_data);

// Create Perf attributes
auto perf_attr = std::make_shared<ppc::core::PerfAttr>();
perf_attr->num_running = 10;

// Create and init perf results
auto perf_results = std::make_shared<ppc::core::PerfResults>();
auto test_task = std::make_shared<ppc::test::perf::TestTask<uint32_t>>(in);

// Create Perf analyzer
ppc::core::Perf perf_analyzer(test_task);
perf_analyzer.PipelineRun(perf_attr, perf_results);

// Create Perf attributes
ppc::core::PerfAttr perf_attr;
perf_analyzer.PipelineRun(perf_attr);

// Get perf statistic
ppc::core::Perf::PrintPerfStatistic(perf_results);
ASSERT_LE(perf_results->time_sec, ppc::core::PerfResults::kMaxTime);
EXPECT_EQ(out[0], in.size());
perf_analyzer.PrintPerfStatistic();
ASSERT_LE(perf_analyzer.GetPerfResults().time_sec, ppc::core::PerfResults::kMaxTime);
EXPECT_EQ(test_task->Get(), in.size());
}

TEST(perf_tests, check_perf_pipeline_float) {
// Create data
std::vector<float> in(2000, 1);
std::vector<float> out(1, 0);

// Create task_data
auto task_data = std::make_shared<ppc::core::TaskData>();
task_data->inputs.emplace_back(reinterpret_cast<uint8_t *>(in.data()));
task_data->inputs_count.emplace_back(in.size());
task_data->outputs.emplace_back(reinterpret_cast<uint8_t *>(out.data()));
task_data->outputs_count.emplace_back(out.size());

// Create Task
auto test_task = std::make_shared<ppc::test::perf::TestTask<float>>(task_data);

// Create Perf attributes
auto perf_attr = std::make_shared<ppc::core::PerfAttr>();
perf_attr->num_running = 10;

// Create and init perf results
auto perf_results = std::make_shared<ppc::core::PerfResults>();
auto test_task = std::make_shared<ppc::test::perf::TestTask<float>>(in);

// Create Perf analyzer
ppc::core::Perf perf_analyzer(test_task);
perf_analyzer.PipelineRun(perf_attr, perf_results);

// Create Perf attributes
ppc::core::PerfAttr perf_attr;
perf_analyzer.PipelineRun(perf_attr);

// Get perf statistic
ppc::core::Perf::PrintPerfStatistic(perf_results);
ASSERT_LE(perf_results->time_sec, ppc::core::PerfResults::kMaxTime);
EXPECT_EQ(out[0], in.size());
perf_analyzer.PrintPerfStatistic();
ASSERT_LE(perf_analyzer.GetPerfResults().time_sec, ppc::core::PerfResults::kMaxTime);
EXPECT_EQ(test_task->Get(), in.size());
}

TEST(perf_tests, check_perf_pipeline_uint8_t_slow_test) {
// Create data
std::vector<uint8_t> in(128, 1);
std::vector<uint8_t> out(1, 0);

// Create task_data
auto task_data = std::make_shared<ppc::core::TaskData>();
task_data->inputs.emplace_back(reinterpret_cast<uint8_t *>(in.data()));
task_data->inputs_count.emplace_back(in.size());
task_data->outputs.emplace_back(reinterpret_cast<uint8_t *>(out.data()));
task_data->outputs_count.emplace_back(out.size());

// Create Task
auto test_task = std::make_shared<ppc::test::perf::FakePerfTask<uint8_t>>(task_data);
auto test_task = std::make_shared<ppc::test::perf::FakePerfTask<uint8_t>>(in);

// Create Perf analyzer
ppc::core::Perf perf_analyzer(test_task);

// Create Perf attributes
auto perf_attr = std::make_shared<ppc::core::PerfAttr>();
perf_attr->num_running = 1;
ppc::core::PerfAttr perf_attr;
perf_attr.num_running = 1;

const auto t0 = std::chrono::high_resolution_clock::now();
perf_attr->current_timer = [&] {
perf_attr.current_timer = [&] {
auto current_time_point = std::chrono::high_resolution_clock::now();
auto duration = std::chrono::duration_cast<std::chrono::nanoseconds>(current_time_point - t0).count();
return static_cast<double>(duration) * 1e-9;
};

// Create and init perf results
auto perf_results = std::make_shared<ppc::core::PerfResults>();

// Create Perf analyzer
ppc::core::Perf perf_analyzer(test_task);
perf_analyzer.PipelineRun(perf_attr, perf_results);
perf_analyzer.PipelineRun(perf_attr);

// Get perf statistic
ASSERT_ANY_THROW(ppc::core::Perf::PrintPerfStatistic(perf_results));
ASSERT_GE(perf_results->time_sec, ppc::core::PerfResults::kMaxTime);
EXPECT_EQ(out[0], in.size());
ASSERT_ANY_THROW(perf_analyzer.PrintPerfStatistic());
}

TEST(perf_tests, check_perf_task) {
TEST(perf_tests, check_perf_task_exception) {
// Create data
std::vector<uint32_t> in(2000, 1);
std::vector<uint32_t> out(1, 0);

// Create task_data
auto task_data = std::make_shared<ppc::core::TaskData>();
task_data->inputs.emplace_back(reinterpret_cast<uint8_t *>(in.data()));
task_data->inputs_count.emplace_back(in.size());
task_data->outputs.emplace_back(reinterpret_cast<uint8_t *>(out.data()));
task_data->outputs_count.emplace_back(out.size());

// Create Task
auto test_task = std::make_shared<ppc::test::perf::TestTask<uint32_t>>(task_data);

// Create Perf attributes
auto perf_attr = std::make_shared<ppc::core::PerfAttr>();
perf_attr->num_running = 10;

// Create and init perf results
auto perf_results = std::make_shared<ppc::core::PerfResults>();
auto test_task = std::make_shared<ppc::test::perf::TestTask<uint32_t>>(in);

// Create Perf analyzer
ppc::core::Perf perf_analyzer(test_task);
perf_analyzer.TaskRun(perf_attr, perf_results);

// Get perf statistic
perf_results->type_of_running = ppc::core::PerfResults::kNone;
ppc::core::Perf::PrintPerfStatistic(perf_results);
ASSERT_LE(perf_results->time_sec, ppc::core::PerfResults::kMaxTime);
EXPECT_EQ(out[0], in.size());
ASSERT_ANY_THROW(perf_analyzer.PrintPerfStatistic());

// Create Perf attributes
ppc::core::PerfAttr perf_attr;
perf_analyzer.TaskRun(perf_attr);
}

TEST(perf_tests, check_perf_task_float) {
// Create data
std::vector<float> in(2000, 1);
std::vector<float> out(1, 0);

// Create task_data
auto task_data = std::make_shared<ppc::core::TaskData>();
task_data->inputs.emplace_back(reinterpret_cast<uint8_t *>(in.data()));
task_data->inputs_count.emplace_back(in.size());
task_data->outputs.emplace_back(reinterpret_cast<uint8_t *>(out.data()));
task_data->outputs_count.emplace_back(out.size());

// Create Task
auto test_task = std::make_shared<ppc::test::perf::TestTask<float>>(task_data);

// Create Perf attributes
auto perf_attr = std::make_shared<ppc::core::PerfAttr>();
perf_attr->num_running = 10;

// Create and init perf results
auto perf_results = std::make_shared<ppc::core::PerfResults>();
auto test_task = std::make_shared<ppc::test::perf::TestTask<float>>(in);

// Create Perf analyzer
ppc::core::Perf perf_analyzer(test_task);
perf_analyzer.TaskRun(perf_attr, perf_results);

// Create Perf attributes
ppc::core::PerfAttr perf_attr;
perf_analyzer.TaskRun(perf_attr);

// Get perf statistic
perf_results->type_of_running = ppc::core::PerfResults::kPipeline;
ppc::core::Perf::PrintPerfStatistic(perf_results);
ASSERT_LE(perf_results->time_sec, ppc::core::PerfResults::kMaxTime);
EXPECT_EQ(out[0], in.size());
perf_analyzer.PrintPerfStatistic();
ASSERT_LE(perf_analyzer.GetPerfResults().time_sec, ppc::core::PerfResults::kMaxTime);
EXPECT_EQ(test_task->Get(), in.size());
}
22 changes: 11 additions & 11 deletions modules/core/perf/func_tests/test_task.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -12,35 +12,35 @@ namespace ppc::test::perf {
template <class T>
class TestTask : public ppc::core::Task {
public:
explicit TestTask(const ppc::core::TaskDataPtr &task_data) : Task(task_data) {}
explicit TestTask(const std::vector<T>& in) : input_(in) {}

bool ValidationImpl() override { return !input_.empty(); }

bool PreProcessingImpl() override {
input_ = reinterpret_cast<T *>(task_data->inputs[0]);
output_ = reinterpret_cast<T *>(task_data->outputs[0]);
output_[0] = 0;
output_ = 0;
return true;
}

bool ValidationImpl() override { return task_data->outputs_count[0] == 1; }

bool RunImpl() override {
for (unsigned i = 0; i < task_data->inputs_count[0]; i++) {
output_[0] += input_[i];
for (unsigned i = 0; i < input_.size(); i++) {
output_ += input_[i];
}
return true;
}

bool PostProcessingImpl() override { return true; }

T Get() { return output_; }

private:
T *input_{};
T *output_{};
std::vector<T> input_{};
T output_;
};

template <class T>
class FakePerfTask : public TestTask<T> {
public:
explicit FakePerfTask(ppc::core::TaskDataPtr perf_task_data) : TestTask<T>(perf_task_data) {}
explicit FakePerfTask(const std::vector<T>& in) : TestTask<T>(in) {}

bool RunImpl() override {
std::this_thread::sleep_for(std::chrono::seconds(11));
Expand Down
17 changes: 8 additions & 9 deletions modules/core/perf/include/perf.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ namespace ppc::core {

struct PerfAttr {
// count of task's running
uint64_t num_running;
uint64_t num_running = 10;
std::function<double()> current_timer = [&] { return 0.0; };
};

Expand All @@ -25,21 +25,20 @@ class Perf {
public:
// Init performance analysis with initialized task and initialized data
explicit Perf(const std::shared_ptr<Task>& task_ptr);
// Set task with initialized task and initialized data for performance
// analysis c
void SetTask(const std::shared_ptr<Task>& task_ptr);
// Check performance of full task's pipeline: PreProcessing() ->
// Validation() -> Run() -> PostProcessing()
void PipelineRun(const std::shared_ptr<PerfAttr>& perf_attr, const std::shared_ptr<PerfResults>& perf_results) const;
void PipelineRun(const PerfAttr& perf_attr);
// Check performance of task's Run() function
void TaskRun(const std::shared_ptr<PerfAttr>& perf_attr, const std::shared_ptr<PerfResults>& perf_results) const;
void TaskRun(const PerfAttr& perf_attr);
// Print results for automation checkers
static void PrintPerfStatistic(const std::shared_ptr<PerfResults>& perf_results);
void PrintPerfStatistic() const;
// Get performance result structure of the current task
PerfResults GetPerfResults();

private:
PerfResults perf_results_;
std::shared_ptr<Task> task_;
static void CommonRun(const std::shared_ptr<PerfAttr>& perf_attr, const std::function<void()>& pipeline,
const std::shared_ptr<PerfResults>& perf_results);
static void CommonRun(const PerfAttr& perf_attr, const std::function<void()>& pipeline, PerfResults& perf_results);
};

} // namespace ppc::core
47 changes: 23 additions & 24 deletions modules/core/perf/src/perf.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -13,16 +13,12 @@

#include "core/task/include/task.hpp"

ppc::core::Perf::Perf(const std::shared_ptr<Task>& task_ptr) { SetTask(task_ptr); }

void ppc::core::Perf::SetTask(const std::shared_ptr<Task>& task_ptr) {
task_ptr->GetData()->state_of_testing = TaskData::StateOfTesting::kPerf;
this->task_ = task_ptr;
ppc::core::Perf::Perf(const std::shared_ptr<Task>& task_ptr) : task_(task_ptr) {
task_ptr->GetStateOfTesting() = Task::StateOfTesting::kPerf;
}

void ppc::core::Perf::PipelineRun(const std::shared_ptr<PerfAttr>& perf_attr,
const std::shared_ptr<ppc::core::PerfResults>& perf_results) const {
perf_results->type_of_running = PerfResults::TypeOfRunning::kPipeline;
void ppc::core::Perf::PipelineRun(const PerfAttr& perf_attr) {
perf_results_.type_of_running = PerfResults::TypeOfRunning::kPipeline;

CommonRun(
perf_attr,
Expand All @@ -32,16 +28,15 @@ void ppc::core::Perf::PipelineRun(const std::shared_ptr<PerfAttr>& perf_attr,
task_->Run();
task_->PostProcessing();
},
perf_results);
perf_results_);
}

void ppc::core::Perf::TaskRun(const std::shared_ptr<PerfAttr>& perf_attr,
const std::shared_ptr<ppc::core::PerfResults>& perf_results) const {
perf_results->type_of_running = PerfResults::TypeOfRunning::kTaskRun;
void ppc::core::Perf::TaskRun(const PerfAttr& perf_attr) {
perf_results_.type_of_running = PerfResults::TypeOfRunning::kTaskRun;

task_->Validation();
task_->PreProcessing();
CommonRun(perf_attr, [&]() { task_->Run(); }, perf_results);
CommonRun(perf_attr, [&]() { task_->Run(); }, perf_results_);
task_->PostProcessing();

task_->Validation();
Expand All @@ -50,30 +45,32 @@ void ppc::core::Perf::TaskRun(const std::shared_ptr<PerfAttr>& perf_attr,
task_->PostProcessing();
}

void ppc::core::Perf::CommonRun(const std::shared_ptr<PerfAttr>& perf_attr, const std::function<void()>& pipeline,
const std::shared_ptr<ppc::core::PerfResults>& perf_results) {
auto begin = perf_attr->current_timer();
for (uint64_t i = 0; i < perf_attr->num_running; i++) {
void ppc::core::Perf::CommonRun(const PerfAttr& perf_attr, const std::function<void()>& pipeline,
ppc::core::PerfResults& perf_results) {
auto begin = perf_attr.current_timer();
for (uint64_t i = 0; i < perf_attr.num_running; i++) {
pipeline();
}
auto end = perf_attr->current_timer();
perf_results->time_sec = end - begin;
auto end = perf_attr.current_timer();
perf_results.time_sec = end - begin;
}

void ppc::core::Perf::PrintPerfStatistic(const std::shared_ptr<PerfResults>& perf_results) {
void ppc::core::Perf::PrintPerfStatistic() const {
std::string relative_path(::testing::UnitTest::GetInstance()->current_test_info()->file());
std::string ppc_regex_template("parallel_programming_course");
std::string perf_regex_template("perf_tests");
std::string type_test_name;

auto time_secs = perf_results->time_sec;
auto time_secs = perf_results_.time_sec;

if (perf_results->type_of_running == PerfResults::TypeOfRunning::kTaskRun) {
if (perf_results_.type_of_running == PerfResults::TypeOfRunning::kTaskRun) {
type_test_name = "task_run";
} else if (perf_results->type_of_running == PerfResults::TypeOfRunning::kPipeline) {
} else if (perf_results_.type_of_running == PerfResults::TypeOfRunning::kPipeline) {
type_test_name = "pipeline";
} else {
type_test_name = "none";
std::stringstream err_msg;
err_msg << '\n' << "The type of performance check for the task was not selected.\n";
throw std::runtime_error(err_msg.str().c_str());
}

auto first_found_position = relative_path.find(ppc_regex_template) + ppc_regex_template.length() + 1;
Expand All @@ -96,3 +93,5 @@ void ppc::core::Perf::PrintPerfStatistic(const std::shared_ptr<PerfResults>& per
throw std::runtime_error(err_msg.str().c_str());
}
}

ppc::core::PerfResults ppc::core::Perf::GetPerfResults() { return perf_results_; }
Loading
Loading