diff --git a/README.md b/README.md
index 00c91ee..e4c1901 100644
--- a/README.md
+++ b/README.md
@@ -18,6 +18,8 @@ We keep track with the ESP-IDF's support period policy mentioned [here](https://
 Currently ESP-IDF versions `release/v4.4` and above are supported by this project.
 
+## Method 1: Using ESP-IDF
+
 ### Install the ESP IDF
 
 Follow the instructions of the
@@ -29,6 +31,28 @@ The next steps assume that this installation is successful and the
 * the `IDF_PATH` environment variable is set
 * the `idf.py` and Xtensa-esp32 tools (e.g., `xtensa-esp32-elf-gcc`) are in `$PATH`
 
+## Method 2: Using PlatformIO
+
+### Configuration
+
+Add the following configuration to your `platformio.ini` file:
+
+```ini
+[env:your_env_name]
+build_flags =
+    -I ${PROJECT_LIBDEPS_DIR}/${PIOENV}/esp-tflite-micro/third_party/flatbuffers/include
+    -I ${PROJECT_LIBDEPS_DIR}/${PIOENV}/esp-tflite-micro/third_party/gemmlowp
+    -I ${PROJECT_LIBDEPS_DIR}/${PIOENV}/esp-tflite-micro/third_party/kissfft
+    -I ${PROJECT_LIBDEPS_DIR}/${PIOENV}/esp-tflite-micro/third_party/ruy
+    -Ofast
+
+lib_deps =
+    https://github.com/espressif/esp-tflite-micro.git
+
+; Optionally, build one of the bundled examples by pointing src_dir at it:
+; src_dir = ${PROJECT_LIBDEPS_DIR}/${PIOENV}/esp-tflite-micro/examples/hello_world
+```
+
 ## Using the component
 
 Run the following command in your ESP-IDF project to install this component:
diff --git a/tensorflow/lite/array.h b/tensorflow/lite/array.h
new file mode 100644
index 0000000..5a60784
--- /dev/null
+++ b/tensorflow/lite/array.h
@@ -0,0 +1,123 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_LITE_ARRAY_H_
+#define TENSORFLOW_LITE_ARRAY_H_
+
+#include <cstring>
+#include <initializer_list>
+#include <memory>
+#include <type_traits>
+#include <vector>
+
+#include "tensorflow/lite/core/c/common.h"
+
+namespace tflite {
+
+/// TfLite*Array helpers
+
+namespace array_internal {
+
+// Function object used as a deleter for unique_ptr holding TfLite*Array
+// objects.
+struct TfLiteArrayDeleter {
+  void operator()(TfLiteIntArray* a);
+  void operator()(TfLiteFloatArray* a);
+};
+
+// Maps T to the corresponding TfLiteArray type.
+template <class T>
+struct TfLiteArrayInfo;
+
+template <>
+struct TfLiteArrayInfo<int> {
+  using Type = TfLiteIntArray;
+};
+
+template <>
+struct TfLiteArrayInfo<float> {
+  using Type = TfLiteFloatArray;
+};
+
+}  // namespace array_internal
+
+template <class T>
+using TfLiteArrayUniquePtr =
+    std::unique_ptr<typename array_internal::TfLiteArrayInfo<T>::Type,
+                    array_internal::TfLiteArrayDeleter>;
+
+// `unique_ptr` wrapper for `TfLiteIntArray`s.
+using IntArrayUniquePtr = TfLiteArrayUniquePtr<int>;
+
+// `unique_ptr` wrapper for `TfLiteFloatArray`s.
+using FloatArrayUniquePtr = TfLiteArrayUniquePtr<float>;
+
+// Allocates a TfLiteArray of given size using malloc.
+//
+// This builds an int array by default as this is the overwhelming part of the
+// use cases.
+template <class T = int>
+TfLiteArrayUniquePtr<T> BuildTfLiteArray(int size);
+
+// Allocates a TfLiteIntArray of given size using malloc.
+template <>
+inline IntArrayUniquePtr BuildTfLiteArray<int>(const int size) {
+  return IntArrayUniquePtr(TfLiteIntArrayCreate(size));
+}
+
+// Allocates a TfLiteFloatArray of given size using malloc.
+template <>
+inline FloatArrayUniquePtr BuildTfLiteArray<float>(const int size) {
+  return FloatArrayUniquePtr(TfLiteFloatArrayCreate(size));
+}
+
+// Allocates a TfLiteArray of given size and initializes it.
+//
+// `values` is expected to hold `size` elements.
+template <class T>
+TfLiteArrayUniquePtr<T> BuildTfLiteArray(const int size,
+                                         const T* const values) {
+  TfLiteArrayUniquePtr<T> array = BuildTfLiteArray<T>(size);
+  if (array) {
+    memcpy(array->data, values, size * sizeof(T));
+  }
+  return array;
+}
+
+// Allocates a TfLiteArray and initializes it with the given values.
+template <class T>
+TfLiteArrayUniquePtr<T> BuildTfLiteArray(const std::vector<T>& values) {
+  return BuildTfLiteArray<T>(static_cast<int>(values.size()), values.data());
+}
+
+// Allocates a TfLiteArray and initializes it with the given values.
+template <class T>
+TfLiteArrayUniquePtr<T> BuildTfLiteArray(
+    const std::initializer_list<T>& values) {
+  return BuildTfLiteArray<T>(static_cast<int>(values.size()), values.begin());
+}
+
+// Allocates a TfLiteIntArray and initializes it with the given array.
+inline IntArrayUniquePtr BuildTfLiteArray(const TfLiteIntArray& other) {
+  return BuildTfLiteArray(other.size, other.data);
+}
+
+// Allocates a TfLiteFloatArray and initializes it with the given array.
+inline FloatArrayUniquePtr BuildTfLiteArray(const TfLiteFloatArray& other) {
+  return BuildTfLiteArray(other.size, other.data);
+}
+
+}  // namespace tflite
+
+#endif  // TENSORFLOW_LITE_ARRAY_H_
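
For reviewers who have not used these helpers before, here is a minimal usage sketch (not part of the diff) of the `BuildTfLiteArray` overloads declared in the new header. The function and variable names (`ArrayHelpersSketch`, `dims`, `scales`) are illustrative only, and the snippet assumes the esp-tflite-micro sources that define the array deleters are compiled into the project.

```cpp
#include <vector>

#include "tensorflow/lite/array.h"

void ArrayHelpersSketch() {
  // Rank-3 shape built from an initializer list; T is deduced as int, so the
  // result is an IntArrayUniquePtr that frees the TfLiteIntArray on scope exit.
  tflite::IntArrayUniquePtr dims = tflite::BuildTfLiteArray({1, 224, 224});

  // Float array built from a std::vector<float>; T is deduced as float.
  std::vector<float> scales = {0.5f, 0.25f};
  tflite::FloatArrayUniquePtr scale_array = tflite::BuildTfLiteArray(scales);

  // Deep copy of an existing TfLiteIntArray.
  tflite::IntArrayUniquePtr dims_copy = tflite::BuildTfLiteArray(*dims);

  // The owned pointers can still be handed to C APIs that do not take
  // ownership; here dims->size == 3 and dims->data[1] == 224.
}
```

Returning a `unique_ptr` with a custom deleter keeps the C-style `TfLite*Array` allocations safe across early returns without changing the C API the arrays are ultimately passed to.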