#pragma once

#include "dl_math.hpp"
#include "dl_module_base.hpp"
#include "dl_module_lut.hpp"

namespace dl {
namespace module {
/**
 * NOTE: Swish activation, y = x * sigmoid(x)
 *
 * @tparam feature_t supports int16_t and int8_t,
 *         - int16_t: stands for operation in int16_t, computed element-wise
 *         - int8_t: stands for operation in int8_t, implemented by LUT when the model provides a table
 */
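// Worked example of the formula above (plain math, no esp-dl specifics assumed):
//   swish(1.0)  =  1.0 * sigmoid(1.0)  =  1.0 / (1 + e^-1)   ≈  0.731
//   swish(-1.0) = -1.0 * sigmoid(-1.0) = -1.0 * 0.2689       ≈ -0.269
// Unlike ReLU, negative inputs are smoothly attenuated rather than zeroed.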
class Swish : public Module {
public:
    /**
     * @brief Construct a new Swish object.
     *
     * @param name       name of module.
     * @param inplace    inplace type.
     * @param quant_type quantization type.
     */
    Swish(const char *name = NULL,
          module_inplace_t inplace = MODULE_NON_INPLACE,
          quant_type_t quant_type = QUANT_TYPE_NONE) :
        Module(name, inplace, quant_type)
    {
    }

    /**
     * @brief Destroy the Swish object.
     */
    ~Swish() {}

    std::vector<std::vector<int>> get_output_shape(std::vector<std::vector<int>> &input_shapes)
    {
        // Swish is element-wise, so the output shape equals the input shape.
        std::vector<std::vector<int>> output_shapes(1, input_shapes[0]);
        return output_shapes;
    }

    void forward(ModelContext *context, runtime_mode_t mode = RUNTIME_MODE_AUTO)
    {
        if (quant_type == QUANT_TYPE_SYMM_8BIT) {
            forward_template<int8_t>(context, mode);
        } else if (quant_type == QUANT_TYPE_SYMM_16BIT) {
            forward_template<int16_t>(context, mode);
        }
    }

    template <typename T>
    void forward_template(ModelContext *context, runtime_mode_t mode)
    {
        TensorBase *input = context->get_tensor(m_inputs_index[0]);
        TensorBase *output = context->get_tensor(m_outputs_index[0]);
        T *input_ptr = (T *)input->get_element_ptr();
        T *output_ptr = (T *)output->get_element_ptr();

        float input_scale = DL_SCALE(input->exponent);
        float output_scale = DL_RESCALE(output->exponent);
        for (size_t i = 0; i < input->size; i++) {
            // Dequantize, apply swish in float, then requantize into the output type.
            float temp = input_ptr[i] * input_scale;
            temp = dl::math::sigmoid(temp) * temp;
            tool::truncate(output_ptr[i], tool::round(temp * output_scale));
        }
    }
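
    // Quantization round-trip sketch for forward_template above, assuming DL_SCALE(e) = 2^e
    // and DL_RESCALE(e) = 2^-e (an assumption; the macro definitions live elsewhere in esp-dl).
    // For an int8 tensor with exponent -7 (scale = 1/128):
    //   input_ptr[i] = 64  ->  temp = 64 * (1/128) = 0.5
    //   swish(0.5)         =   0.5 * sigmoid(0.5) ≈ 0.5 * 0.6225 ≈ 0.311
    //   output_ptr[i]      =   round(0.311 * 128) ≈ 40, then truncated to the int8 range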

    void forward_args(void *args) {}

    /**
     * @brief Deserialize a Swish module instance from node serialization information.
     */
    static Module *deserialize(fbs::FbsModel *fbs_model, std::string node_name)
    {
        Module *op = nullptr;
        quant_type_t quant_type;
        fbs_model->get_operation_attribute(node_name, "quant_type", quant_type);

        // Create module
        if (quant_type == QUANT_TYPE_SYMM_8BIT) {
            TensorBase *table = fbs_model->get_operation_lut(node_name);
            if (table) {
                op = new LUT(node_name.c_str(), table, MODULE_INPLACE_CHANGED_BUFFER, quant_type);
            } else {
                op = new Swish(node_name.c_str(), MODULE_INPLACE_CHANGED_BUFFER, quant_type);
            }
        } else {
            op = new Swish(node_name.c_str(), MODULE_INPLACE_CHANGED_BUFFER, quant_type);
        }

        return op;
    }
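
    // Design note on deserialize above (an inference, not documented in this file):
    // an int8 input has only 256 possible values, so a precomputed lookup table shipped
    // in the model can replace the per-element float math entirely; an int16 table would
    // need 65536 entries, which is presumably why only the 8-bit path checks for a LUT.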

    void print() { ESP_LOGI("Swish", "quant_type: %s.", quant_type_to_string(quant_type)); }
};
} // namespace module
} // namespace dl