/**
* Copyright (C) 2020 Savoir-faire Linux Inc.
*
* Author: Aline Gondim Santos <aline.gondimsantos@savoirfairelinux.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
* USA.
*/
#include "TFInference.h"
// Std libraries
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <numeric>
#include <stdexcept>
#include <utility>
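
// Build-time switch: with TFLITE defined the plugin uses the TensorFlow Lite
// runtime; otherwise it links against the full TensorFlow C++ API.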
#ifdef TFLITE
// Tensorflow headers
#include <tensorflow/lite/builtin_op_data.h>
#include <tensorflow/lite/interpreter.h>
#include <tensorflow/lite/kernels/register.h>
#include <tensorflow/lite/model.h>
#include <tensorflow/lite/optional_debug_tools.h>
#else
#ifdef WIN32
#include <WinBase.h>
#endif
#include <tensorflow/core/graph/graph.h>
//#include <tensorflow/core/graph/default_device.h>
#include <tensorflow/core/platform/env.h>
#endif // TFLITE
#include <pluglog.h>
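
// Platform path separator and the tag under which this plugin logs
// (FORESEG: foreground segmentation).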
const char sep = separator();
const std::string TAG = "FORESEG";
namespace jami {
TensorflowInference::TensorflowInference(TFModel tfModel)
    : tfModel(std::move(tfModel))
{}
TensorflowInference::~TensorflowInference() {}
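
// Reports whether the model has been loaded and its tensors allocated;
// allocated_ is expected to be set by the interpreter-building path,
// which is not part of this excerpt.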
bool
TensorflowInference::isAllocated() const
{
    return allocated_;
}
#ifdef TFLITE
void
TensorflowInference::loadModel()
{
    Plog::log(Plog::LogPriority::INFO, TAG, "inside loadModel()");
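    // BuildFromFile maps the .tflite flatbuffer from disk; the returned
    // FlatBufferModel must stay alive as long as any interpreter built on it.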
    flatbufferModel = tflite::FlatBufferModel::BuildFromFile(tfModel.modelPath.c_str());
    if (!flatbufferModel) {
        // A null model means the file was missing or not a valid flatbuffer;
        // nothing else can proceed without it, so fail loudly.
        throw std::runtime_error("Failed to load the model file: " + tfModel.modelPath);
    }
}
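
// Minimal usage sketch (hedged: TFModel's constructors are not shown in this
// file; only its modelPath member is known from the code above, and
// makeModel() is a hypothetical helper standing in for real initialization):
//   TFModel model = makeModel("model.tflite");
//   TensorflowInference inference(model);
//   inference.loadModel();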