

Python interpreter.load_delegate method code examples

This article collects typical usage examples of the tflite_runtime.interpreter.load_delegate method in Python. If you are wondering how interpreter.load_delegate is used in practice, or are looking for concrete examples of the method, the curated code samples below may help. You can also explore further usage examples from the containing module, tflite_runtime.interpreter.


The following presents 8 code examples of the interpreter.load_delegate method, sorted by popularity by default.
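As a quick orientation before the examples (a minimal sketch, not taken from any of the projects below; the model filename is illustrative and 'libedgetpu.so.1' is the Linux library name): load_delegate(library, options=None) loads a delegate shared library such as the Edge TPU runtime and returns a Delegate object, which is then passed to the Interpreter via experimental_delegates.

import tflite_runtime.interpreter as tflite

# Load the Edge TPU delegate and attach it to an interpreter.
delegate = tflite.load_delegate('libedgetpu.so.1')      # Linux library name
interpreter = tflite.Interpreter(
    model_path='model_edgetpu.tflite',                  # illustrative path
    experimental_delegates=[delegate])
interpreter.allocate_tensors()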

Example 1: __init__

# Required imports: from tflite_runtime import interpreter [as alias]
# or: from tflite_runtime.interpreter import load_delegate [as alias]
def __init__(self):
        edge_tpu_delegate = None
        try:
            edge_tpu_delegate = load_delegate('libedgetpu.so.1.0')
        except ValueError:
            print("No EdgeTPU detected. Falling back to CPU.")
        
        if edge_tpu_delegate is None:
            self.interpreter = tflite.Interpreter(
                model_path='/cpu_model.tflite')
        else:
            self.interpreter = tflite.Interpreter(
                model_path='/edgetpu_model.tflite',
                experimental_delegates=[edge_tpu_delegate])
        
        self.interpreter.allocate_tensors()

        self.tensor_input_details = self.interpreter.get_input_details()
        self.tensor_output_details = self.interpreter.get_output_details() 
Developer: blakeblackshear, Project: frigate, Lines of code: 21, Source file: edgetpu.py

Example 2: _get_edgetpu_interpreter

# Required imports: from tflite_runtime import interpreter [as alias]
# or: from tflite_runtime.interpreter import load_delegate [as alias]
def _get_edgetpu_interpreter(model=None):  # pragma: no cover
    # Note: Looking for ideas how to test Coral EdgeTPU dependent code
    # in a cloud CI environment such as Travis CI and Github
    tf_interpreter = None
    if model:
        try:
            edgetpu_delegate = load_delegate('libedgetpu.so.1.0')
            assert edgetpu_delegate
            tf_interpreter = Interpreter(
                model_path=model,
                experimental_delegates=[edgetpu_delegate]
                )
            log.debug('EdgeTPU available. Will use EdgeTPU model.')
        except Exception as e:
            log.debug('EdgeTPU init error: %r', e)
            # log.debug(stacktrace())
    return tf_interpreter 
Developer: ambianic, Project: ambianic-edge, Lines of code: 19, Source file: inference.py
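Regarding the note in Example 2 about testing Edge TPU dependent code in CI: one common approach is to patch load_delegate with unittest.mock so the no-hardware branch can be exercised without a Coral device. The sketch below is an illustration under stated assumptions; the module path 'inference' and the test name are hypothetical, not taken from the ambianic-edge repository.

from unittest import mock

from inference import _get_edgetpu_interpreter  # hypothetical module path


def test_get_edgetpu_interpreter_without_hardware():
    # load_delegate raises ValueError when libedgetpu cannot be loaded;
    # _get_edgetpu_interpreter should swallow the error and return None.
    # Patch load_delegate in the module where it was imported.
    with mock.patch('inference.load_delegate',
                    side_effect=ValueError('Failed to load delegate')):
        assert _get_edgetpu_interpreter(model='model_edgetpu.tflite') is None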

Example 3: loadModel

# Required imports: from tflite_runtime import interpreter [as alias]
# or: from tflite_runtime.interpreter import load_delegate [as alias]
def loadModel(dP):
    if dP.TFliteRuntime:
        import tflite_runtime.interpreter as tflite
        # model here is intended as interpreter
        if dP.runCoralEdge:
            print(" Running on Coral Edge TPU")
            try:
                model = tflite.Interpreter(model_path=os.path.splitext(dP.model_name)[0]+'_edgetpu.tflite',
                    experimental_delegates=[tflite.load_delegate(dP.edgeTPUSharedLib,{})])
            except:
                print(" Coral Edge TPU not found. Please make sure it's connected. ")
        else:
            model = tflite.Interpreter(model_path=os.path.splitext(dP.model_name)[0]+'.tflite')
        model.allocate_tensors()
    else:
        getTFVersion(dP)
        import tensorflow as tf
        if dP.useTFlitePred:
            # model here is intended as interpreter
            model = tf.lite.Interpreter(model_path=os.path.splitext(dP.model_name)[0]+'.tflite')
            model.allocate_tensors()
        else:
            model = tf.keras.models.load_model(dP.model_name)
    return model

#************************************
# Make prediction based on framework
#************************************ 
Developer: feranick, Project: SpectralMachine, Lines of code: 30, Source file: libSpectraKeras.py

Example 4: __init__

# Required imports: from tflite_runtime import interpreter [as alias]
# or: from tflite_runtime.interpreter import load_delegate [as alias]
def __init__(self):
        # Load TFLite model and allocate tensors.
        self.interpreter = tflite.Interpreter(model_path=PERSON_CLASS_MODEL,
            experimental_delegates=[tflite.load_delegate('libedgetpu.so.1')])
        self.interpreter.allocate_tensors()

        # Get input and output tensors.
        self.input_details = self.interpreter.get_input_details()
        self.output_details = self.interpreter.get_output_details() 
Developer: goruck, Project: smart-zoneminder, Lines of code: 11, Source file: detect_servers_tpu.py

Example 5: make_interpreter

# Required imports: from tflite_runtime import interpreter [as alias]
# or: from tflite_runtime.interpreter import load_delegate [as alias]
def make_interpreter(model_file):
  model_file, *device = model_file.split('@')
  return tflite.Interpreter(
      model_path=model_file,
      experimental_delegates=[
          tflite.load_delegate(EDGETPU_SHARED_LIB,
                               {'device': device[0]} if device else {})
      ])
Developer: google-coral, Project: examples-camera, Lines of code: 10, Source file: common.py
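EDGETPU_SHARED_LIB is defined elsewhere in the google-coral sample; a commonly used definition (shown here as an approximation of the Coral examples, so verify against the project's actual common.py) selects the platform-specific Edge TPU runtime library:

import platform

# Platform-specific shared-library name of the Edge TPU runtime.
EDGETPU_SHARED_LIB = {
    'Linux': 'libedgetpu.so.1',
    'Darwin': 'libedgetpu.1.dylib',
    'Windows': 'edgetpu.dll',
}[platform.system()]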

Example 6: make_interpreter

# Required imports: from tflite_runtime import interpreter [as alias]
# or: from tflite_runtime.interpreter import load_delegate [as alias]
def make_interpreter(model_file):
  model_file, *device = model_file.split('@')
  return tflite.Interpreter(
      model_path=model_file,
      experimental_delegates=[
          tflite.load_delegate(EDGETPU_SHARED_LIB,
                               {'device': device[0]} if device else {})
      ]) 
Developer: google-coral, Project: tflite, Lines of code: 10, Source file: detect_image.py
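A short usage sketch for the make_interpreter function above (the model filename is illustrative; the device strings follow the Coral convention, e.g. 'usb:0' or 'pci:0'): the model_file@device suffix pins the interpreter to a specific Edge TPU when more than one is attached.

# Use whichever Edge TPU the runtime finds (empty options dict).
interpreter = make_interpreter('mobilenet_v2_edgetpu.tflite')

# Pin the interpreter to the first USB-attached Edge TPU.
interpreter = make_interpreter('mobilenet_v2_edgetpu.tflite@usb:0')
interpreter.allocate_tensors()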

Example 7: _load_tflite

# Required imports: from tflite_runtime import interpreter [as alias]
# or: from tflite_runtime.interpreter import load_delegate [as alias]
def _load_tflite(self, tflite_path):
    experimental_delegates = []
    try:
      experimental_delegates.append(
          tflite.load_delegate(
              EDGETPU_SHARED_LIB,
              {'device': self._config.device} if self._config.device else {}))
    except AttributeError as e:
      if '\'Delegate\' object has no attribute \'_library\'' in str(e):
        print(
            'Warning: EdgeTPU library not found. You can still run CPU models, '
            'but if you have a Coral device make sure you set it up: '
            'https://coral.ai/docs/setup/.')
    except ValueError as e:
      if 'Failed to load delegate from ' in str(e):
        print(
            'Warning: EdgeTPU library not found. You can still run CPU models, '
            'but if you have a Coral device make sure you set it up: '
            'https://coral.ai/docs/setup/.')

    try:
      self._interpreter = tflite.Interpreter(
          model_path=tflite_path, experimental_delegates=experimental_delegates)
    except TypeError as e:
      if 'got an unexpected keyword argument \'experimental_delegates\'' in str(
          e):
        self._interpreter = tflite.Interpreter(model_path=tflite_path)
    try:
      self._interpreter.allocate_tensors()
    except RuntimeError as e:
      if 'edgetpu-custom-op' in str(e) or 'EdgeTpuDelegateForCustomOp' in str(
          e):
        raise RuntimeError('Loaded an EdgeTPU model without the EdgeTPU '
                           'library loaded. If you have a Coral device make '
                           'sure you set it up: https://coral.ai/docs/setup/.')
      else:
        raise e
    self._is_lstm = self._check_lstm()
    if self._is_lstm:
      print('Loading an LSTM model.')
      self._lstm_c = np.copy(self.input_tensor(1))
      self._lstm_h = np.copy(self.input_tensor(2)) 
Developer: google, Project: automl-video-ondevice, Lines of code: 44, Source file: tflite_object_detection.py

Example 8: main

# Required imports: from tflite_runtime import interpreter [as alias]
# or: from tflite_runtime.interpreter import load_delegate [as alias]
def main():
    ap = argparse.ArgumentParser()
    ap.add_argument('--model',
        type=str,
        default=None,
        help='tflite model to evaluate')
    ap.add_argument('--dataset',
        default='/mnt/dataset/',
        help='location of evaluation dataset')
    args = vars(ap.parse_args())

    logging.basicConfig(format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
        level=logging.DEBUG)

    # Let model on command line override default from config.
    if args['model'] is not None:
        model = args['model']
    else:
        model = DEFAULT_MODEL

    logger.info('Evaluating tflite model: {} on dataset: {}'
        .format(model, args['dataset']))

    # Grab test images paths.
    imagePaths = glob(args['dataset'] + '/**/*.*', recursive=True)

    # Create a test image generator comprehension.
    # Generates (image path, image label) tuples.
    test_gen = ((path.abspath(imagePath), imagePath.split(path.sep)[-2])
        for imagePath in imagePaths)

    # Start the tflite interpreter on the tpu and allocate tensors.
    interpreter = tflite.Interpreter(model_path=model,
        experimental_delegates=[tflite.load_delegate('libedgetpu.so.1')])
    interpreter.allocate_tensors()

    logger.info(interpreter.get_input_details())
    logger.info(interpreter.get_output_details())

    # Compute accuracy on the test image set.
    accuracy, inference_time = evaluate_model(interpreter=interpreter,
        test_gen=test_gen)

    num_images = len(imagePaths)

    logger.info('accuracy: {:.4f}, num test images: {}, inferences / sec: {:.4f}'
        .format(accuracy, num_images, num_images / inference_time)) 
Developer: goruck, Project: smart-zoneminder, Lines of code: 49, Source file: evaluate_model.py
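The evaluate_model helper is defined elsewhere in evaluate_model.py and is not part of this excerpt. A hypothetical sketch of what such a helper could look like (the LABELS list and the uint8 input assumption are illustrative, not taken from the smart-zoneminder repository):

import time
import numpy as np
from PIL import Image

LABELS = ['person', 'no_person']  # assumed label order


def evaluate_model(interpreter, test_gen):
    # Classify each test image and accumulate accuracy and inference time.
    input_details = interpreter.get_input_details()[0]
    output_details = interpreter.get_output_details()[0]
    _, height, width, _ = input_details['shape']

    correct = total = 0
    inference_time = 0.0
    for image_path, label in test_gen:
        image = Image.open(image_path).convert('RGB').resize((width, height))
        input_data = np.expand_dims(np.asarray(image, dtype=np.uint8), axis=0)
        interpreter.set_tensor(input_details['index'], input_data)

        start = time.monotonic()
        interpreter.invoke()
        inference_time += time.monotonic() - start

        prediction = np.argmax(interpreter.get_tensor(output_details['index'])[0])
        correct += int(LABELS[prediction] == label)
        total += 1
    return correct / total, inference_time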


Note: The tflite_runtime.interpreter.load_delegate examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors, who retain copyright; consult each project's license before redistributing or reusing the code. Do not reproduce this article without permission.