micro_op_resolver.h

/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_MICRO_MICRO_OP_RESOLVER_H_
#define TENSORFLOW_LITE_MICRO_MICRO_OP_RESOLVER_H_

#include "tensorflow/lite/c/common.h"
#include "tensorflow/lite/core/api/error_reporter.h"
#include "tensorflow/lite/core/api/flatbuffer_conversions.h"
#include "tensorflow/lite/core/api/op_resolver.h"
#include "tensorflow/lite/schema/schema_generated.h"

namespace tflite {

// This is an interface for the OpResolver for TFLiteMicro. The differences
// from the TFLite OpResolver base class are to:
//  * explicitly remove support for Op versions
//  * allow for finer grained registration of the Builtin Ops to reduce code
//    size for TFLiteMicro.
//
// We need an interface class instead of directly using MicroMutableOpResolver
// because MicroMutableOpResolver is a class template with the number of
// registered Ops as the template parameter.
class MicroOpResolver : public OpResolver {
 public:
  typedef TfLiteStatus (*BuiltinParseFunction)(const Operator* op,
                                               ErrorReporter* error_reporter,
                                               BuiltinDataAllocator* allocator,
                                               void** builtin_data);

  // Returns the Op registration struct corresponding to the enum code from
  // the flatbuffer schema. Returns nullptr if the op is not found or if
  // op == BuiltinOperator_CUSTOM.
  virtual const TfLiteRegistration* FindOp(BuiltinOperator op) const = 0;

  // Returns the Op registration struct corresponding to the custom operator
  // by name.
  virtual const TfLiteRegistration* FindOp(const char* op) const = 0;

  // This implementation exists for compatibility with the OpResolver base
  // class and disregards the version parameter.
  const TfLiteRegistration* FindOp(BuiltinOperator op,
                                   int version) const final {
    return FindOp(op);
  }

  // This implementation exists for compatibility with the OpResolver base
  // class and disregards the version parameter.
  const TfLiteRegistration* FindOp(const char* op, int version) const final {
    return FindOp(op);
  }

  // Returns the operator specific parsing function for the OpData for a
  // BuiltinOperator (if registered), else nullptr.
  virtual BuiltinParseFunction GetOpDataParser(BuiltinOperator op) const = 0;

  ~MicroOpResolver() override {}
};

}  // namespace tflite

#endif  // TENSORFLOW_LITE_MICRO_MICRO_OP_RESOLVER_H_
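For reference, below is a minimal usage sketch (not part of the header above) of how this interface is typically consumed through MicroMutableOpResolver, the concrete class template mentioned in the class comment. It assumes the registration helpers AddFullyConnected() and AddSoftmax() and the include path tensorflow/lite/micro/micro_mutable_op_resolver.h from the 2020-era TFLite Micro tree; treat it as an illustration of the interface rather than the canonical API.

#include "tensorflow/lite/micro/micro_mutable_op_resolver.h"
#include "tensorflow/lite/schema/schema_generated.h"

tflite::MicroOpResolver* BuildResolver() {
  // Register only the builtin ops the model actually needs. The template
  // argument is the maximum number of registrations; registering a small,
  // explicit set is what lets the linker drop unused kernels and keeps the
  // binary small compared to pulling in every builtin op.
  static tflite::MicroMutableOpResolver<2> resolver;
  resolver.AddFullyConnected();
  resolver.AddSoftmax();

  // Lookups then go through the interface declared above: FindOp() returns
  // the registration (or nullptr), GetOpDataParser() the parsing function.
  const TfLiteRegistration* reg =
      resolver.FindOp(tflite::BuiltinOperator_FULLY_CONNECTED);
  tflite::MicroOpResolver::BuiltinParseFunction parser =
      resolver.GetOpDataParser(tflite::BuiltinOperator_FULLY_CONNECTED);
  (void)reg;
  (void)parser;
  return &resolver;
}

The returned pointer can be handed to a MicroInterpreter, which uses exactly the FindOp() and GetOpDataParser() entry points declared in this header to resolve and parse the operators found in the model flatbuffer.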