-
Notifications
You must be signed in to change notification settings - Fork 3.1k
Expand file tree
/
Copy pathsoftmax.cpp
More file actions
102 lines (84 loc) · 3.07 KB
/
softmax.cpp
File metadata and controls
102 lines (84 loc) · 3.07 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
// Copyright (C) 2018-2026 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
#include <gtest/gtest.h>
#include <cmath>
#include <limits>
#include "shared_test_classes/single_op/softmax.hpp"
#include "common_test_utils/ov_tensor_utils.hpp"
namespace ov {
namespace test {
namespace subgraph {
// =======================
// GPU Numerical Edge Cases
// =======================
// Fills the first input tensor with `values` and records a matching 1-D
// dynamic shape. Reallocates the tensor when its element count differs from
// `values.size()` — the original code wrote values.size() floats into
// whatever tensor was pre-allocated, which is an out-of-bounds write when
// the existing tensor is smaller than the new test case.
//
// values             : row to feed to the softmax under test.
// inputsData         : name -> tensor map owned by the test fixture; the
//                      first entry is assumed to be the softmax input
//                      (TODO confirm against the fixture's input ordering).
// inputDynamicShapes : replaced with the single shape {values.size()}.
static void prepare_input(const std::vector<float>& values,
                          std::map<std::string, ov::Tensor>& inputsData,
                          std::vector<ov::Shape>& inputDynamicShapes) {
    // Dereferencing begin() on an empty map is UB; fail the test instead.
    ASSERT_FALSE(inputsData.empty()) << "prepare_input called with no input tensors";
    inputDynamicShapes.clear();
    inputDynamicShapes.push_back({values.size()});
    auto& tensor = inputsData.begin()->second;
    // Make sure the destination can hold exactly values.size() floats.
    if (tensor.get_size() != values.size()) {
        tensor = ov::Tensor(ov::element::f32, ov::Shape{values.size()});
    }
    auto* data = tensor.data<float>();
    for (size_t i = 0; i < values.size(); ++i) {
        data[i] = values[i];
    }
}
// Compares an actual softmax output row against its expected row.
// Sizes must match exactly; NaN entries are matched by NaN-ness (NaN never
// compares equal to itself), and finite entries are compared with a small
// absolute tolerance suitable for fp32 softmax results.
static void check_output(const std::vector<float>& expected,
                         const std::vector<float>& actual) {
    ASSERT_EQ(expected.size(), actual.size());
    for (size_t idx = 0; idx < expected.size(); ++idx) {
        const float want = expected[idx];
        const float got = actual[idx];
        // Guard on the non-NaN (common) case first.
        if (!std::isnan(want)) {
            EXPECT_NEAR(want, got, 1e-6f);
        } else {
            EXPECT_TRUE(std::isnan(got));
        }
    }
}
// A single +inf in the input makes max-subtraction produce inf - inf = NaN
// at that position; the expected rows below encode the behavior this plugin
// is pinned to for such mixed-infinity inputs.
TEST_P(SoftMaxLayerTest, MixedInfinityCases) {
    using Case = std::pair<std::vector<float>, std::vector<float>>;
    const std::vector<Case> scenarios = {
        {{INFINITY, 1.f, 2.f}, {NAN, 0.f, 0.f}},
        {{INFINITY, -INFINITY, 1.f}, {NAN, 0.f, 0.f}}
    };
    for (const auto& [input, expected] : scenarios) {
        prepare_input(input, inputsData, inputDynamicShapes);
        run();
        const auto actual = get_runtime_output()[0].as<std::vector<float>>();
        check_output(expected, actual);
    }
}
// Rows containing several infinities: every +inf position is expected to
// yield NaN (inf - inf under max-subtraction), every -inf position 0.
TEST_P(SoftMaxLayerTest, MultipleInfinityCases) {
    using Case = std::pair<std::vector<float>, std::vector<float>>;
    const std::vector<Case> scenarios = {
        {{INFINITY, INFINITY, 1.f}, {NAN, NAN, 0.f}},
        {{INFINITY, INFINITY, INFINITY}, {NAN, NAN, NAN}},
        {{INFINITY, -INFINITY, -INFINITY}, {NAN, 0.f, 0.f}}
    };
    for (const auto& [input, expected] : scenarios) {
        prepare_input(input, inputsData, inputDynamicShapes);
        run();
        const auto actual = get_runtime_output()[0].as<std::vector<float>>();
        check_output(expected, actual);
    }
}
// A lone -inf contributes exp(-inf) = 0 and leaves the remaining elements
// as an ordinary finite softmax: softmax({1, 2}) = {0.2689414, 0.7310586}.
//
// Fix: the original asserted with EXPECT_THAT(ElementsAreArray(...)), i.e.
// EXACT float equality on computed softmax values — fragile across
// devices/drivers and inconsistent with this file's tolerant check_output
// helper. ElementsAreArray is also gmock, which this file never includes.
TEST_P(SoftMaxLayerTest, NegativeInfinityOnlyCase) {
    prepare_input({-INFINITY, 1.f, 2.f}, inputsData, inputDynamicShapes);
    run();
    auto out = get_runtime_output()[0].as<std::vector<float>>();
    const std::vector<float> expected = {0.f, 0.2689414f, 0.7310586f};
    // check_output compares finite values with a 1e-6 absolute tolerance.
    check_output(expected, out);
}
// Any NaN in the input row must propagate to every output element (the row
// max and the normalizing sum both become NaN).
//
// Fix: the original asserted with EXPECT_THAT(ElementsAreArray(expected))
// where `expected` is all NaN — ElementsAreArray matches with operator==,
// and NaN == NaN is always false, so every case could only FAIL. The file's
// check_output helper matches NaN via std::isnan, which is what we need.
// (ElementsAreArray is also gmock, which this file never includes.)
TEST_P(SoftMaxLayerTest, NaNPropagationCases) {
    std::vector<std::vector<float>> cases = {
        {NAN, 1.f, 2.f},
        {1.f, NAN, 2.f},
        {NAN, NAN, NAN}
    };
    for (const auto& input : cases) {
        prepare_input(input, inputsData, inputDynamicShapes);
        run();
        auto out = get_runtime_output()[0].as<std::vector<float>>();
        // Expect a NaN at every output position, regardless of input size.
        const std::vector<float> expected(out.size(), std::numeric_limits<float>::quiet_NaN());
        check_output(expected, out);
    }
}
} // namespace subgraph
} // namespace test
} // namespace ov