Skip to content
Snippets Groups Projects
Select Git revision
  • fada8d067dde466a8d947de31ee1e84c68a8044b
  • master default
  • cmake_fixes
  • pulls/1772757862/750
  • copilot/fix-770
  • windows_ci_static
  • c_link
  • cpack
  • windows_ci
  • cert_pk_id
  • proxy_push_result
  • cnode_put_id
  • update-windows-build
  • proxy
  • resubscribe_on_token_change
  • actions
  • client_mode
  • llhttp
  • search_node_add
  • crypto_aes_gcm_argon2
  • ios_notifications
  • v3.4.0
  • v3.3.1
  • v3.3.1rc1
  • v3.3.1rc2
  • v3.3.0
  • v3.2.0
  • v3.1.11
  • v3.1.10
  • v3.1.9
  • v3.1.8.2
  • v3.1.8.1
  • v3.1.8
  • v3.1.7
  • v3.1.6
  • v3.1.5
  • v3.1.4
  • v3.1.3
  • v3.1.2
  • v3.1
  • v3.0.1
41 results

opendht.cpp

Blame
  • Code owners
    Assign users and groups as approvers for specific file changes. Learn more.
    TFInference.cpp 9.24 KiB
    /**
     *  Copyright (C) 2020 Savoir-faire Linux Inc.
     *
     *  Author: Aline Gondim Santos <aline.gondimsantos@savoirfairelinux.com>
     *
     *  This program is free software; you can redistribute it and/or modify
     *  it under the terms of the GNU General Public License as published by
     *  the Free Software Foundation; either version 3 of the License, or
     *  (at your option) any later version.
     *
     *  This program is distributed in the hope that it will be useful,
     *  but WITHOUT ANY WARRANTY; without even the implied warranty of
     *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     *  GNU General Public License for more details.
     *
     *  You should have received a copy of the GNU General Public License
     *  along with this program; if not, write to the Free Software
     *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301
     * USA.
     */
    
    #include "TFInference.h"
    // Std libraries
    #include <fstream>
    #include <iostream>
    #include <numeric>
    #include <stdlib.h>
    #include <utility>
    
    #ifdef TFLITE
    // Tensorflow headers
    #include <tensorflow/lite/builtin_op_data.h>
    #include <tensorflow/lite/interpreter.h>
    #include <tensorflow/lite/kernels/register.h>
    #include <tensorflow/lite/model.h>
    #include <tensorflow/lite/optional_debug_tools.h>
    #else
    #ifdef WIN32
    #include <WinBase.h>
    #endif
    #include <tensorflow/core/graph/graph.h>
    //#include <tensorflow/core/graph/default_device.h>
    #include <tensorflow/core/platform/env.h>
    #endif // TFLITE
    
    #include <pluglog.h>
    
    // Platform path separator, provided by pluglog's separator() helper
    // (presumably '/' on POSIX and '\\' on Windows — verify in pluglog.h).
    const char sep = separator();
    // Log tag prefixed to every Plog message emitted from this file.
    const std::string TAG = "FORESEG";
    
    namespace jami {
    /**
     * Builds an inference engine around the given model description.
     *
     * @param tfModel  Model description (paths, options). Taken by value and
     *                 moved into the member so callers that pass a temporary
     *                 pay no extra copy; lvalue arguments are copied once at
     *                 the call site as before.
     */
    TensorflowInference::TensorflowInference(TFModel tfModel)
        : tfModel(std::move(tfModel))
    {}
    
    TensorflowInference::~TensorflowInference() {}
    
    /**
     * Reports whether the model's tensors have been allocated.
     *
     * @return the cached allocated_ flag — presumably set by the allocation
     *         path of this class once the model is loaded (the setter is not
     *         visible in this chunk; verify against the rest of the file).
     */
    bool
    TensorflowInference::isAllocated() const
    {
        return allocated_;
    }
    
    #ifdef TFLITE
    
    void
    TensorflowInference::loadModel()
    {
        Plog::log(Plog::LogPriority::INFO, TAG, "inside loadModel()");
        flatbufferModel = tflite::FlatBufferModel::BuildFromFile(tfModel.modelPath.c_str());
        if (!flatbufferModel) {