Phil2020, glenn-jocher committed on
Commit 53349da
1 parent: f2ca30a

Scope TF imports in `DetectMultiBackend()` (#5792)


* tensorflow or tflite exclusively as interpreter

As per bug report https://github.com/ultralytics/yolov5/issues/5709, I think there should be only one attempt to assign the interpreter, and it appears the tflite runtime is only ever needed for Edge TPU models (a condensed sketch follows this message).

* Scope imports

* Nested definition line fix

* Update common.py

Co-authored-by: Glenn Jocher <[email protected]>
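
For reference, a condensed sketch of the selection logic this commit converges on (the helper name `load_tflite_interpreter` is mine; the Edge TPU delegate filenames follow the Coral runtime convention used in the unchanged context of models/common.py):

import platform

def load_tflite_interpreter(w):
    # Exactly one interpreter assignment is attempted; tflite_runtime is
    # imported only for the Edge TPU case, full TensorFlow otherwise.
    if 'edgetpu' in w.lower():  # Edge TPU model
        import tflite_runtime.interpreter as tfli
        delegate = {'Linux': 'libedgetpu.so.1',  # Coral Edge TPU runtime libraries
                    'Darwin': 'libedgetpu.1.dylib',
                    'Windows': 'edgetpu.dll'}[platform.system()]
        return tfli.Interpreter(model_path=w,
                                experimental_delegates=[tfli.load_delegate(delegate)])
    import tensorflow as tf  # plain TFLite model: tf.lite ships with full TF
    return tf.lite.Interpreter(model_path=w)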

Files changed (1)
  1. models/common.py +5 -2
models/common.py CHANGED
@@ -337,19 +337,21 @@ class DetectMultiBackend(nn.Module):
             context = model.create_execution_context()
             batch_size = bindings['images'].shape[0]
         else:  # TensorFlow model (TFLite, pb, saved_model)
-            import tensorflow as tf
             if pb:  # https://www.tensorflow.org/guide/migrate#a_graphpb_or_graphpbtxt
+                LOGGER.info(f'Loading {w} for TensorFlow *.pb inference...')
+                import tensorflow as tf
+
                 def wrap_frozen_graph(gd, inputs, outputs):
                     x = tf.compat.v1.wrap_function(lambda: tf.compat.v1.import_graph_def(gd, name=""), [])  # wrapped
                     return x.prune(tf.nest.map_structure(x.graph.as_graph_element, inputs),
                                    tf.nest.map_structure(x.graph.as_graph_element, outputs))
 
-                LOGGER.info(f'Loading {w} for TensorFlow *.pb inference...')
                 graph_def = tf.Graph().as_graph_def()
                 graph_def.ParseFromString(open(w, 'rb').read())
                 frozen_func = wrap_frozen_graph(gd=graph_def, inputs="x:0", outputs="Identity:0")
             elif saved_model:
                 LOGGER.info(f'Loading {w} for TensorFlow saved_model inference...')
+                import tensorflow as tf
                 model = tf.keras.models.load_model(w)
             elif tflite:  # https://www.tensorflow.org/lite/guide/python#install_tensorflow_lite_for_python
                 if 'edgetpu' in w.lower():
@@ -361,6 +363,7 @@ class DetectMultiBackend(nn.Module):
                     interpreter = tfli.Interpreter(model_path=w, experimental_delegates=[tfli.load_delegate(delegate)])
                 else:
                     LOGGER.info(f'Loading {w} for TensorFlow Lite inference...')
+                    import tensorflow as tf
                     interpreter = tf.lite.Interpreter(model_path=w)  # load TFLite model
             interpreter.allocate_tensors()  # allocate
             input_details = interpreter.get_input_details()  # inputs
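
As a usage note, a toy example (hypothetical, not from the repo) showing why scoping the import matters: the heavy tensorflow package is only loaded when a TensorFlow-format model is actually requested, so torch-only inference never pays the import cost.

import sys

def load_backend(w):
    # Hypothetical demo of a scoped import: tensorflow is imported only
    # when the TensorFlow branch actually executes.
    if w.endswith('.pb'):
        import tensorflow as tf  # deferred, branch-local import
        return tf.Graph().as_graph_def()
    return None  # non-TensorFlow path never touches tensorflow

load_backend('model.pt')  # PyTorch-style weights: TF branch skipped
print('tensorflow' in sys.modules)  # -> False; the import never ran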