

Python interpreter.load_delegate method code examples

This article collects and summarizes typical usage examples of the Python method tflite_runtime.interpreter.load_delegate. If you are wondering what interpreter.load_delegate does, how to call it, or where to find working examples of it, the curated code samples below should help. You can also explore further usage examples from its containing module, tflite_runtime.interpreter.


The following presents 8 code examples of the interpreter.load_delegate method, sorted by popularity by default.
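
Before the individual examples, here is a minimal, self-contained sketch of the pattern most of these projects share: try to load the EdgeTPU delegate and fall back to plain CPU execution if it is unavailable. The shared-library names follow the Coral example projects, and 'model.tflite' is a placeholder path, not part of any example below.

import platform
import tflite_runtime.interpreter as tflite

# Shared-library name of the EdgeTPU delegate on each platform
# (as used in the Coral example projects).
EDGETPU_SHARED_LIB = {
    'Linux': 'libedgetpu.so.1',
    'Darwin': 'libedgetpu.1.dylib',
    'Windows': 'edgetpu.dll',
}[platform.system()]

try:
    # load_delegate raises ValueError when the library or device is missing.
    delegates = [tflite.load_delegate(EDGETPU_SHARED_LIB)]
except ValueError:
    delegates = []  # no EdgeTPU found: run the model on the CPU

interpreter = tflite.Interpreter(model_path='model.tflite',  # placeholder path
                                 experimental_delegates=delegates)
interpreter.allocate_tensors()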

Example 1: __init__

# Required import: from tflite_runtime import interpreter [as alias]
# Or: from tflite_runtime.interpreter import load_delegate [as alias]
def __init__(self):
        edge_tpu_delegate = None
        try:
            edge_tpu_delegate = load_delegate('libedgetpu.so.1.0')
        except ValueError:
            print("No EdgeTPU detected. Falling back to CPU.")
        
        if edge_tpu_delegate is None:
            self.interpreter = tflite.Interpreter(
                model_path='/cpu_model.tflite')
        else:
            self.interpreter = tflite.Interpreter(
                model_path='/edgetpu_model.tflite',
                experimental_delegates=[edge_tpu_delegate])
        
        self.interpreter.allocate_tensors()

        self.tensor_input_details = self.interpreter.get_input_details()
        self.tensor_output_details = self.interpreter.get_output_details() 
Developer: blakeblackshear, Project: frigate, Lines of code: 21, Source: edgetpu.py
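
The excerpt above uses both the tflite alias and a bare load_delegate call; a minimal sketch of the imports it assumes, matching the import hints at the top of the example, would be:

import tflite_runtime.interpreter as tflite
from tflite_runtime.interpreter import load_delegate

The delegate is loaded first so the constructor can fall back to the CPU model when no Coral device is present.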

Example 2: _get_edgetpu_interpreter

# Required import: from tflite_runtime import interpreter [as alias]
# Or: from tflite_runtime.interpreter import load_delegate [as alias]
def _get_edgetpu_interpreter(model=None):  # pragma: no cover
    # Note: Looking for ideas how to test Coral EdgeTPU dependent code
    # in a cloud CI environment such as Travis CI and Github
    tf_interpreter = None
    if model:
        try:
            edgetpu_delegate = load_delegate('libedgetpu.so.1.0')
            assert edgetpu_delegate
            tf_interpreter = Interpreter(
                model_path=model,
                experimental_delegates=[edgetpu_delegate]
                )
            log.debug('EdgeTPU available. Will use EdgeTPU model.')
        except Exception as e:
            log.debug('EdgeTPU init error: %r', e)
            # log.debug(stacktrace())
    return tf_interpreter 
Developer: ambianic, Project: ambianic-edge, Lines of code: 19, Source: inference.py
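
Because _get_edgetpu_interpreter returns None when the EdgeTPU delegate cannot be loaded, callers will typically fall back to a CPU interpreter. A hedged usage sketch (the model filenames are placeholders, not taken from ambianic-edge):

tf_interpreter = _get_edgetpu_interpreter(model='mobilenet_ssd_edgetpu.tflite')
if tf_interpreter is None:
    # No EdgeTPU available: load the plain CPU TFLite model instead.
    tf_interpreter = Interpreter(model_path='mobilenet_ssd.tflite')
tf_interpreter.allocate_tensors()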

Example 3: loadModel

# Required import: from tflite_runtime import interpreter [as alias]
# Or: from tflite_runtime.interpreter import load_delegate [as alias]
def loadModel(dP):
    if dP.TFliteRuntime:
        import tflite_runtime.interpreter as tflite
        # model here is intended as interpreter
        if dP.runCoralEdge:
            print(" Running on Coral Edge TPU")
            try:
                model = tflite.Interpreter(model_path=os.path.splitext(dP.model_name)[0]+'_edgetpu.tflite',
                    experimental_delegates=[tflite.load_delegate(dP.edgeTPUSharedLib,{})])
            except Exception:
                print(" Coral Edge TPU not found. Please make sure it's connected. ")
                # Fall back to the standard (non-EdgeTPU) TFLite model so that
                # "model" is always defined before allocate_tensors() below.
                model = tflite.Interpreter(model_path=os.path.splitext(dP.model_name)[0]+'.tflite')
        else:
            model = tflite.Interpreter(model_path=os.path.splitext(dP.model_name)[0]+'.tflite')
        model.allocate_tensors()
    else:
        getTFVersion(dP)
        import tensorflow as tf
        if dP.useTFlitePred:
            # model here is intended as interpreter
            model = tf.lite.Interpreter(model_path=os.path.splitext(dP.model_name)[0]+'.tflite')
            model.allocate_tensors()
        else:
            model = tf.keras.models.load_model(dP.model_name)
    return model

#************************************
# Make prediction based on framework
#************************************ 
Developer: feranick, Project: SpectralMachine, Lines of code: 30, Source: libSpectraKeras.py

Example 4: __init__

# Required import: from tflite_runtime import interpreter [as alias]
# Or: from tflite_runtime.interpreter import load_delegate [as alias]
def __init__(self):
        # Load TFLite model and allocate tensors.
        self.interpreter = tflite.Interpreter(model_path=PERSON_CLASS_MODEL,
            experimental_delegates=[tflite.load_delegate('libedgetpu.so.1')])
        self.interpreter.allocate_tensors()

        # Get input and output tensors.
        self.input_details = self.interpreter.get_input_details()
        self.output_details = self.interpreter.get_output_details() 
Developer: goruck, Project: smart-zoneminder, Lines of code: 11, Source: detect_servers_tpu.py

Example 5: make_interpreter

# Required import: from tflite_runtime import interpreter [as alias]
# Or: from tflite_runtime.interpreter import load_delegate [as alias]
def make_interpreter(model_file):
    model_file, *device = model_file.split('@')
    return tflite.Interpreter(
      model_path=model_file,
      experimental_delegates=[
          tflite.load_delegate(EDGETPU_SHARED_LIB,
                               {'device': device[0]} if device else {})
      ]) 
Developer: google-coral, Project: examples-camera, Lines of code: 10, Source: common.py
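
A usage sketch for make_interpreter: an optional '@' suffix on the model path selects a specific EdgeTPU, and the device string is passed through to load_delegate as the 'device' option (values such as 'usb:0' or 'pci:0' are accepted by the Coral runtime). The model filename below is only an illustration.

interpreter = make_interpreter('mobilenet_v2_edgetpu.tflite@usb:0')
interpreter.allocate_tensors()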

Example 6: make_interpreter

# Required import: from tflite_runtime import interpreter [as alias]
# Or: from tflite_runtime.interpreter import load_delegate [as alias]
def make_interpreter(model_file):
  model_file, *device = model_file.split('@')
  return tflite.Interpreter(
      model_path=model_file,
      experimental_delegates=[
          tflite.load_delegate(EDGETPU_SHARED_LIB,
                               {'device': device[0]} if device else {})
      ]) 
Developer: google-coral, Project: tflite, Lines of code: 10, Source: detect_image.py

Example 7: _load_tflite

# Required import: from tflite_runtime import interpreter [as alias]
# Or: from tflite_runtime.interpreter import load_delegate [as alias]
def _load_tflite(self, tflite_path):
    experimental_delegates = []
    try:
      experimental_delegates.append(
          tflite.load_delegate(
              EDGETPU_SHARED_LIB,
              {'device': self._config.device} if self._config.device else {}))
    except AttributeError as e:
      if '\'Delegate\' object has no attribute \'_library\'' in str(e):
        print(
            'Warning: EdgeTPU library not found. You can still run CPU models, '
            'but if you have a Coral device make sure you set it up: '
            'https://coral.ai/docs/setup/.')
    except ValueError as e:
      if 'Failed to load delegate from ' in str(e):
        print(
            'Warning: EdgeTPU library not found. You can still run CPU models, '
            'but if you have a Coral device make sure you set it up: '
            'https://coral.ai/docs/setup/.')

    try:
      self._interpreter = tflite.Interpreter(
          model_path=tflite_path, experimental_delegates=experimental_delegates)
    except TypeError as e:
      if 'got an unexpected keyword argument \'experimental_delegates\'' in str(
          e):
        self._interpreter = tflite.Interpreter(model_path=tflite_path)
    try:
      self._interpreter.allocate_tensors()
    except RuntimeError as e:
      if 'edgetpu-custom-op' in str(e) or 'EdgeTpuDelegateForCustomOp' in str(
          e):
        raise RuntimeError('Loaded an EdgeTPU model without the EdgeTPU '
                           'library loaded. If you have a Coral device make '
                           'sure you set it up: https://coral.ai/docs/setup/.')
      else:
        raise e
    self._is_lstm = self._check_lstm()
    if self._is_lstm:
      print('Loading an LSTM model.')
      self._lstm_c = np.copy(self.input_tensor(1))
      self._lstm_h = np.copy(self.input_tensor(2)) 
Developer: google, Project: automl-video-ondevice, Lines of code: 44, Source: tflite_object_detection.py

Example 8: main

# Required import: from tflite_runtime import interpreter [as alias]
# Or: from tflite_runtime.interpreter import load_delegate [as alias]
def main():
    ap = argparse.ArgumentParser()
    ap.add_argument('--model',
        type=str,
        default=None,
        help='tflite model to evaluate')
    ap.add_argument('--dataset',
        default='/mnt/dataset/',
        help='location of evaluation dataset')
    args = vars(ap.parse_args())

    logging.basicConfig(format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
        level=logging.DEBUG)

    # Let model on command line override default from config.
    if args['model'] is not None:
        model = args['model']
    else:
        model = DEFAULT_MODEL

    logger.info('Evaluating tflite model: {} on dataset: {}'
        .format(model, args['dataset']))

    # Grab test images paths.
    imagePaths = glob(args['dataset'] + '/**/*.*', recursive=True)

    # Create a test image generator comprehension.
    # Generates (image path, image label) tuples.
    test_gen = ((path.abspath(imagePath), imagePath.split(path.sep)[-2])
        for imagePath in imagePaths)

    # Start the tflite interpreter on the tpu and allocate tensors.
    interpreter = tflite.Interpreter(model_path=model,
        experimental_delegates=[tflite.load_delegate('libedgetpu.so.1')])
    interpreter.allocate_tensors()

    logger.info(interpreter.get_input_details())
    logger.info(interpreter.get_output_details())

    # Compute accuracy on the test image set.
    accuracy, inference_time = evaluate_model(interpreter=interpreter,
        test_gen=test_gen)

    num_images = len(imagePaths)

    logger.info('accuracy: {:.4f}, num test images: {}, inferences / sec: {:.4f}'
        .format(accuracy, num_images, num_images / inference_time)) 
Developer: goruck, Project: smart-zoneminder, Lines of code: 49, Source: evaluate_model.py


Note: The tflite_runtime.interpreter.load_delegate examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and redistribution or use should follow each project's License. Do not reproduce without permission.