Skip to content

Commit 5b87e70

Browse files
committed
refactor performance tests: improve naming consistency across test cases for enhanced clarity and readability
1 parent e472e1d commit 5b87e70

1 file changed

Lines changed: 35 additions & 34 deletions

File tree

modules/performance/tests/perf_tests.cpp

Lines changed: 35 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ namespace ppc::performance {
5757

5858
namespace {
5959

60-
TEST(perf_tests, check_perf_pipeline) {
60+
TEST(PerfTest, Pipeline_WithUint32Vector_CompletesWithinTimeLimit) {
6161
std::vector<uint32_t> in(2000, 1);
6262

6363
auto test_task = std::make_shared<ppc::test::TestPerfTask<std::vector<uint32_t>, uint32_t>>(in);
@@ -72,7 +72,7 @@ TEST(perf_tests, check_perf_pipeline) {
7272
EXPECT_EQ(test_task->GetOutput(), in.size());
7373
}
7474

75-
TEST(perf_tests, check_perf_pipeline_float) {
75+
TEST(PerfTest, Pipeline_WithFloatVector_CompletesWithinTimeLimit) {
7676
std::vector<float> in(2000, 1);
7777

7878
auto test_task = std::make_shared<ppc::test::TestPerfTask<std::vector<float>, float>>(in);
@@ -87,7 +87,7 @@ TEST(perf_tests, check_perf_pipeline_float) {
8787
EXPECT_EQ(test_task->GetOutput(), in.size());
8888
}
8989

90-
TEST(perf_tests, check_perf_pipeline_uint8_t_slow_test) {
90+
TEST(PerfTest, Pipeline_WithSlowTask_ThrowsOnTimeExceeded) {
9191
std::vector<uint8_t> in(128, 1);
9292

9393
auto test_task = std::make_shared<ppc::test::FakePerfTask<std::vector<uint8_t>, uint8_t>>(in);
@@ -108,7 +108,7 @@ TEST(perf_tests, check_perf_pipeline_uint8_t_slow_test) {
108108
ASSERT_ANY_THROW(perf_analyzer.PrintPerfStatistic("check_perf_pipeline_uint8_t_slow_test"));
109109
}
110110

111-
TEST(perf_tests, check_perf_task_exception) {
111+
TEST(PerfTest, TaskRun_WithoutPriorExecution_ThrowsException) {
112112
std::vector<uint32_t> in(2000, 1);
113113

114114
auto test_task = std::make_shared<ppc::test::TestPerfTask<std::vector<uint32_t>, uint32_t>>(in);
@@ -121,7 +121,7 @@ TEST(perf_tests, check_perf_task_exception) {
121121
perf_analyzer.TaskRun(perf_attr);
122122
}
123123

124-
TEST(perf_tests, check_perf_task_float) {
124+
TEST(PerfTest, TaskRun_WithFloatVector_CompletesWithinTimeLimit) {
125125
std::vector<float> in(2000, 1);
126126

127127
auto test_task = std::make_shared<ppc::test::TestPerfTask<std::vector<float>, float>>(in);
@@ -146,7 +146,7 @@ struct ParamTestCase {
146146

147147
class GetStringParamNameParamTest : public ::testing::TestWithParam<ParamTestCase> {};
148148

149-
TEST_P(GetStringParamNameParamTest, ReturnsExpectedString) {
149+
TEST_P(GetStringParamNameParamTest, GetStringParamName_WithValidInput_ReturnsExpectedString) {
150150
const auto& param = GetParam();
151151
EXPECT_EQ(GetStringParamName(param.input), param.expected_output);
152152
}
@@ -189,7 +189,7 @@ class GetStringTaskTypeTest : public ::testing::TestWithParam<TaskTypeTestCase>
189189
void TearDown() override { std::filesystem::remove(temp_path); }
190190
};
191191

192-
TEST_P(GetStringTaskTypeTest, ReturnsExpectedString) {
192+
TEST_P(GetStringTaskTypeTest, GetStringTaskType_WithValidTypeAndFile_ReturnsExpectedString) {
193193
const auto& param = GetParam();
194194
EXPECT_EQ(GetStringTaskType(param.type, temp_path), param.expected) << "Failed on: " << param.label;
195195
}
@@ -202,12 +202,12 @@ INSTANTIATE_TEST_SUITE_P(AllTypeCases, GetStringTaskTypeTest,
202202
TaskTypeTestCase{TypeOfTask::kTBB, "tbb_TBB", "kTBB"},
203203
TaskTypeTestCase{TypeOfTask::kSEQ, "seq_SEQ", "kSEQ"}));
204204

205-
TEST(GetStringTaskTypeStandaloneTest, ThrowsIfFileMissing) {
205+
TEST(GetStringTaskTypeStandaloneTest, GetStringTaskType_WithMissingFile_ThrowsException) {
206206
std::string missing_path = "non_existent_settings.json";
207207
EXPECT_THROW(GetStringTaskType(TypeOfTask::kSEQ, missing_path), std::runtime_error);
208208
}
209209

210-
TEST(GetStringTaskTypeStandaloneTest, ExceptionMessageContainsPath) {
210+
TEST(GetStringTaskTypeStandaloneTest, GetStringTaskType_WithMissingFile_ExceptionContainsPath) {
211211
const std::string missing_path = "non_existent_settings.json";
212212
EXPECT_THROW(try { GetStringTaskType(TypeOfTask::kSEQ, missing_path); } catch (const std::runtime_error& e) {
213213
EXPECT_NE(std::string(e.what()).find(missing_path), std::string::npos);
@@ -216,7 +216,7 @@ TEST(GetStringTaskTypeStandaloneTest, ExceptionMessageContainsPath) {
216216
std::runtime_error);
217217
}
218218

219-
TEST(GetStringTaskTypeStandaloneTest, ReturnsUnknownForInvalidEnum) {
219+
TEST(GetStringTaskTypeStandaloneTest, GetStringTaskType_WithInvalidEnum_ReturnsUnknown) {
220220
std::string path = (std::filesystem::temp_directory_path() / "tmp_settings.json").string();
221221
std::ofstream(path) << R"({"tasks":{"seq":"SEQ"}})";
222222

@@ -226,18 +226,18 @@ TEST(GetStringTaskTypeStandaloneTest, ReturnsUnknownForInvalidEnum) {
226226
std::filesystem::remove(path);
227227
}
228228

229-
TEST(GetStringTaskTypeEdgeCases, ThrowsIfFileCannotBeOpened) {
229+
TEST(GetStringTaskTypeEdgeCases, GetStringTaskType_WithUnreadableFile_ThrowsException) {
230230
EXPECT_THROW(GetStringTaskType(TypeOfTask::kSEQ, "definitely_missing_file.json"), std::runtime_error);
231231
}
232232

233-
TEST(GetStringTaskTypeEdgeCases, ThrowsIfJsonIsMalformed) {
233+
TEST(GetStringTaskTypeEdgeCases, GetStringTaskType_WithMalformedJson_ThrowsException) {
234234
std::string path = (std::filesystem::temp_directory_path() / "bad_json.json").string();
235235
std::ofstream(path) << "{ this is not valid json ";
236236
EXPECT_THROW(GetStringTaskType(TypeOfTask::kSEQ, path), NlohmannJsonParseError);
237237
std::filesystem::remove(path);
238238
}
239239

240-
TEST(GetStringTaskTypeEdgeCases, ThrowsIfJsonValueIsNull) {
240+
TEST(GetStringTaskTypeEdgeCases, GetStringTaskType_WithNullJsonValue_ThrowsException) {
241241
std::string path = (std::filesystem::temp_directory_path() / "null_value.json").string();
242242
std::ofstream(path) << R"({"tasks": { "seq": null }})";
243243

@@ -246,15 +246,15 @@ TEST(GetStringTaskTypeEdgeCases, ThrowsIfJsonValueIsNull) {
246246
std::filesystem::remove(path);
247247
}
248248

249-
TEST(GetStringTaskTypeEdgeCases, ReturnsUnknownIfEnumOutOfRange) {
249+
TEST(GetStringTaskTypeEdgeCases, GetStringTaskType_WithEnumOutOfRange_ReturnsUnknown) {
250250
std::string path = (std::filesystem::temp_directory_path() / "ok.json").string();
251251
std::ofstream(path) << R"({"tasks":{"seq":"SEQ"}})";
252252
auto result = GetStringTaskType(TypeOfTask::kUnknown, path);
253253
EXPECT_EQ(result, "unknown");
254254
std::filesystem::remove(path);
255255
}
256256

257-
TEST(GetStringTaskStatusTest, HandlesEnabledAndDisabled) {
257+
TEST(GetStringTaskStatusTest, GetStringTaskStatus_WithEnabledAndDisabled_ReturnsCorrectString) {
258258
EXPECT_EQ(GetStringTaskStatus(StatusOfTask::kEnabled), "enabled");
259259
EXPECT_EQ(GetStringTaskStatus(StatusOfTask::kDisabled), "disabled");
260260
}
@@ -268,7 +268,7 @@ class DummyTask : public Task<int, int> {
268268
bool PostProcessingImpl() override { return true; }
269269
};
270270

271-
TEST(TaskTest, GetDynamicTypeReturnsCorrectEnum) {
271+
TEST(TaskTest, GetDynamicType_WithValidTask_ReturnsCorrectEnum) {
272272
DummyTask task;
273273
task.SetTypeOfTask(TypeOfTask::kOMP);
274274
task.Validation();
@@ -278,10 +278,11 @@ TEST(TaskTest, GetDynamicTypeReturnsCorrectEnum) {
278278
EXPECT_EQ(task.GetDynamicTypeOfTask(), TypeOfTask::kOMP);
279279
}
280280

281-
TEST(TaskTest, DestructorTerminatesIfWrongOrder) {
281+
TEST(TaskTest, Destructor_WithWrongOrder_TerminatesGracefully) {
282282
{
283283
DummyTask task;
284284
EXPECT_THROW(task.Run(), std::runtime_error);
285+
// This task doesn't cause destructor failure - just execution order error
285286
}
286287

287288
// Create a new task to complete the lifecycle properly
@@ -328,7 +329,7 @@ TYPED_TEST(GetNamespaceTest, ExtractsNamespaceCorrectly) {
328329
}
329330
}
330331

331-
TEST(PerfTest, PipelineRunAndTaskRun) {
332+
TEST(PerfTest, PipelineRunAndTaskRun_WithValidTask_ExecutesSuccessfully) {
332333
auto task_ptr = std::make_shared<DummyTask>();
333334
Perf<int, int> perf(task_ptr);
334335

@@ -352,7 +353,7 @@ TEST(PerfTest, PipelineRunAndTaskRun) {
352353
EXPECT_GT(res_taskrun.time_sec, 0.0);
353354
}
354355

355-
TEST(PerfTest, PrintPerfStatisticThrowsOnNone) {
356+
TEST(PerfTest, PrintPerfStatistic_WithNoneType_ThrowsException) {
356357
{
357358
auto task_ptr = std::make_shared<DummyTask>();
358359
Perf<int, int> perf(task_ptr);
@@ -362,34 +363,34 @@ TEST(PerfTest, PrintPerfStatisticThrowsOnNone) {
362363
ppc::util::DestructorFailureFlag::Unset();
363364
}
364365

365-
TEST(PerfTest, GetStringParamNameTest) {
366+
TEST(PerfTest, GetStringParamName_WithValidParameters_ReturnsCorrectString) {
366367
EXPECT_EQ(GetStringParamName(PerfResults::kTaskRun), "task_run");
367368
EXPECT_EQ(GetStringParamName(PerfResults::kPipeline), "pipeline");
368369
EXPECT_EQ(GetStringParamName(PerfResults::kNone), "none");
369370
}
370371

371-
TEST(PerfTest, DefaultTimerReturnsNegativeOne) { EXPECT_EQ(DefaultTimer(), -1.0); }
372+
TEST(PerfTest, DefaultTimer_WhenCalled_ReturnsNegativeOne) { EXPECT_EQ(DefaultTimer(), -1.0); }
372373

373-
TEST(PerfTest, PerfAttrDefaultValues) {
374+
TEST(PerfTest, PerfAttr_WithDefaultConstructor_HasCorrectDefaultValues) {
374375
PerfAttr attr;
375376
EXPECT_EQ(attr.num_running, 5U);
376377
EXPECT_EQ(attr.current_timer(), -1.0);
377378
}
378379

379-
TEST(PerfTest, PerfResultsDefaultValues) {
380+
TEST(PerfTest, PerfResults_WithDefaultConstructor_HasCorrectDefaultValues) {
380381
PerfResults results;
381382
EXPECT_EQ(results.time_sec, 0.0);
382383
EXPECT_EQ(results.type_of_running, PerfResults::kNone);
383384
EXPECT_EQ(PerfResults::kMaxTime, 10.0);
384385
}
385386

386-
TEST(PerfTest, PerfResultsEnumValues) {
387+
TEST(PerfTest, PerfResults_WithEnumValues_HasCorrectValues) {
387388
EXPECT_EQ(static_cast<uint8_t>(PerfResults::kPipeline), 0);
388389
EXPECT_EQ(static_cast<uint8_t>(PerfResults::kTaskRun), 1);
389390
EXPECT_EQ(static_cast<uint8_t>(PerfResults::kNone), 2);
390391
}
391392

392-
TEST(PerfTest, PerfConstructorSetsTaskState) {
393+
TEST(PerfTest, PerfConstructor_WithTask_SetsTaskStateCorrectly) {
393394
auto task_ptr = std::make_shared<DummyTask>();
394395
Perf<int, int> perf(task_ptr);
395396

@@ -402,7 +403,7 @@ TEST(PerfTest, PerfConstructorSetsTaskState) {
402403
task_ptr->PostProcessing();
403404
}
404405

405-
TEST(PerfTest, GetPerfResultsReturnsCorrectResults) {
406+
TEST(PerfTest, GetPerfResults_AfterExecution_ReturnsCorrectResults) {
406407
auto task_ptr = std::make_shared<DummyTask>();
407408
Perf<int, int> perf(task_ptr);
408409

@@ -430,7 +431,7 @@ TEST(PerfTest, GetPerfResultsReturnsCorrectResults) {
430431
EXPECT_GT(taskrun_results.time_sec, 0.0);
431432
}
432433

433-
TEST(PerfTest, CommonRunCalculatesAverageTime) {
434+
TEST(PerfTest, CommonRun_WithMultipleExecutions_CalculatesAverageTime) {
434435
auto task_ptr = std::make_shared<DummyTask>();
435436
Perf<int, int> perf(task_ptr);
436437

@@ -453,7 +454,7 @@ TEST(PerfTest, CommonRunCalculatesAverageTime) {
453454
EXPECT_DOUBLE_EQ(results.time_sec, 1.0);
454455
}
455456

456-
TEST(PerfTest, PrintPerfStatisticPipelineOutput) {
457+
TEST(PerfTest, PrintPerfStatistic_WithPipelineExecution_OutputsCorrectStatistics) {
457458
auto task_ptr = std::make_shared<DummyTask>();
458459
Perf<int, int> perf(task_ptr);
459460

@@ -475,7 +476,7 @@ TEST(PerfTest, PrintPerfStatisticPipelineOutput) {
475476
EXPECT_NE(output.find("0.0200000000"), std::string::npos); // 0.1/5 = 0.02
476477
}
477478

478-
TEST(PerfTest, PrintPerfStatisticTaskRunOutput) {
479+
TEST(PerfTest, PrintPerfStatistic_WithTaskRunExecution_OutputsCorrectStatistics) {
479480
auto task_ptr = std::make_shared<DummyTask>();
480481
Perf<int, int> perf(task_ptr);
481482

@@ -496,7 +497,7 @@ TEST(PerfTest, PrintPerfStatisticTaskRunOutput) {
496497
EXPECT_NE(output.find("test_taskrun:task_run:"), std::string::npos);
497498
}
498499

499-
TEST(PerfTest, PrintPerfStatisticThrowsOnExceedingMaxTime) {
500+
TEST(PerfTest, PrintPerfStatistic_WithTimeExceeded_ThrowsException) {
500501
auto task_ptr = std::make_shared<DummyTask>();
501502
Perf<int, int> perf(task_ptr);
502503

@@ -524,7 +525,7 @@ TEST(PerfTest, PrintPerfStatisticThrowsOnExceedingMaxTime) {
524525
EXPECT_NE(output.find("test_exceed_time:pipeline:-1.0000000000"), std::string::npos);
525526
}
526527

527-
TEST(PerfTest, TaskRunCompletesPipelineAfterTiming) {
528+
TEST(PerfTest, TaskRun_WithTiming_CompletesPipelineCorrectly) {
528529
int validation_count = 0;
529530
int preprocessing_count = 0;
530531
int run_count = 0;
@@ -582,7 +583,7 @@ TEST(PerfTest, TaskRunCompletesPipelineAfterTiming) {
582583

583584
namespace test_namespace {} // namespace test_namespace
584585

585-
TEST(PerfTest, TemplateInstantiationWithDifferentTypes) {
586+
TEST(PerfTest, Template_WithDifferentTypes_InstantiatesCorrectly) {
586587
// Test that the Perf template can be instantiated with different types
587588
auto int_task = std::make_shared<DummyTask>();
588589
Perf<int, int> int_perf(int_task);
@@ -599,7 +600,7 @@ TEST(PerfTest, TemplateInstantiationWithDifferentTypes) {
599600
EXPECT_EQ(vector_perf.GetPerfResults().type_of_running, PerfResults::kPipeline);
600601
}
601602

602-
TEST(PerfTest, PerfAttrCustomValues) {
603+
TEST(PerfTest, PerfAttr_WithCustomValues_SetsValuesCorrectly) {
603604
PerfAttr attr;
604605
attr.num_running = 10;
605606
attr.current_timer = []() { return 42.0; };
@@ -608,7 +609,7 @@ TEST(PerfTest, PerfAttrCustomValues) {
608609
EXPECT_EQ(attr.current_timer(), 42.0);
609610
}
610611

611-
TEST(TaskTest, Destructor_InvalidPipelineOrderTerminates_PartialPipeline) {
612+
TEST(TaskTest, Destructor_WithInvalidPipelineOrderAndPartialExecution_TerminatesGracefully) {
612613
{
613614
struct BadTask : Task<int, int> {
614615
bool ValidationImpl() override { return true; }

0 commit comments

Comments (0)