def export_formats():
    # YOLOv5 export formats
-    x = [['PyTorch', '-', '.pt', True],
-         ['TorchScript', 'torchscript', '.torchscript', True],
-         ['ONNX', 'onnx', '.onnx', True],
-         ['OpenVINO', 'openvino', '_openvino_model', False],
-         ['TensorRT', 'engine', '.engine', True],
-         ['CoreML', 'coreml', '.mlmodel', False],
-         ['TensorFlow SavedModel', 'saved_model', '_saved_model', True],
-         ['TensorFlow GraphDef', 'pb', '.pb', True],
-         ['TensorFlow Lite', 'tflite', '.tflite', False],
-         ['TensorFlow Edge TPU', 'edgetpu', '_edgetpu.tflite', False],
+    x = [['PyTorch', '-', '.pt', True], ['TorchScript', 'torchscript', '.torchscript', True],
+         ['ONNX', 'onnx', '.onnx', True], ['OpenVINO', 'openvino', '_openvino_model', False],
+         ['TensorRT', 'engine', '.engine', True], ['CoreML', 'coreml', '.mlmodel', False],
+         ['TensorFlow SavedModel', 'saved_model', '_saved_model', True], ['TensorFlow GraphDef', 'pb', '.pb', True],
+         ['TensorFlow Lite', 'tflite', '.tflite', False], ['TensorFlow Edge TPU', 'edgetpu', '_edgetpu.tflite', False],
         ['TensorFlow.js', 'tfjs', '_web_model', False]]
    return pd.DataFrame(x, columns=['Format', 'Argument', 'Suffix', 'GPU'])

@@ -119,14 +114,25 @@ def export_onnx(model, im, file, opset, train, dynamic, simplify, prefix=colorst
        LOGGER.info(f'\n{prefix} starting export with onnx {onnx.__version__}...')
        f = file.with_suffix('.onnx')

-        torch.onnx.export(model, im, f, verbose=False, opset_version=opset,
-                          training=torch.onnx.TrainingMode.TRAINING if train else torch.onnx.TrainingMode.EVAL,
-                          do_constant_folding=not train,
-                          input_names=['images'],
-                          output_names=['output'],
-                          dynamic_axes={'images': {0: 'batch', 2: 'height', 3: 'width'},  # shape(1,3,640,640)
-                                        'output': {0: 'batch', 1: 'anchors'}  # shape(1,25200,85)
-                                        } if dynamic else None)
+        torch.onnx.export(
+            model,
+            im,
+            f,
+            verbose=False,
+            opset_version=opset,
+            training=torch.onnx.TrainingMode.TRAINING if train else torch.onnx.TrainingMode.EVAL,
+            do_constant_folding=not train,
+            input_names=['images'],
+            output_names=['output'],
+            dynamic_axes={
+                'images': {
+                    0: 'batch',
+                    2: 'height',
+                    3: 'width'},  # shape(1,3,640,640)
+                'output': {
+                    0: 'batch',
+                    1: 'anchors'}  # shape(1,25200,85)
+            } if dynamic else None)

        # Checks
        model_onnx = onnx.load(f)  # load onnx model
@@ -140,10 +146,9 @@ def export_onnx(model, im, file, opset, train, dynamic, simplify, prefix=colorst
                import onnxsim

                LOGGER.info(f'{prefix} simplifying with onnx-simplifier {onnxsim.__version__}...')
-                model_onnx, check = onnxsim.simplify(
-                    model_onnx,
-                    dynamic_input_shape=dynamic,
-                    input_shapes={'images': list(im.shape)} if dynamic else None)
+                model_onnx, check = onnxsim.simplify(model_onnx,
+                                                     dynamic_input_shape=dynamic,
+                                                     input_shapes={'images': list(im.shape)} if dynamic else None)
                assert check, 'assert check failed'
                onnx.save(model_onnx, f)
            except Exception as e:
@@ -246,9 +251,18 @@ def export_engine(model, im, file, train, half, simplify, workspace=4, verbose=F
        LOGGER.info(f'\n{prefix} export failure: {e}')


-def export_saved_model(model, im, file, dynamic,
-                       tf_nms=False, agnostic_nms=False, topk_per_class=100, topk_all=100, iou_thres=0.45,
-                       conf_thres=0.25, keras=False, prefix=colorstr('TensorFlow SavedModel:')):
+def export_saved_model(model,
+                       im,
+                       file,
+                       dynamic,
+                       tf_nms=False,
+                       agnostic_nms=False,
+                       topk_per_class=100,
+                       topk_all=100,
+                       iou_thres=0.45,
+                       conf_thres=0.25,
+                       keras=False,
+                       prefix=colorstr('TensorFlow SavedModel:')):
    # YOLOv5 TensorFlow SavedModel export
    try:
        import tensorflow as tf
@@ -278,11 +292,10 @@ def export_saved_model(model, im, file, dynamic,
            tfm = tf.Module()
            tfm.__call__ = tf.function(lambda x: frozen_func(x)[0], [spec])
            tfm.__call__(im)
-            tf.saved_model.save(
-                tfm,
-                f,
-                options=tf.saved_model.SaveOptions(experimental_custom_gradients=False) if
-                check_version(tf.__version__, '2.6') else tf.saved_model.SaveOptions())
+            tf.saved_model.save(tfm,
+                                f,
+                                options=tf.saved_model.SaveOptions(experimental_custom_gradients=False)
+                                if check_version(tf.__version__, '2.6') else tf.saved_model.SaveOptions())
        LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
        return keras_model, f
    except Exception as e:
@@ -352,10 +365,10 @@ def export_edgetpu(keras_model, im, file, prefix=colorstr('Edge TPU:')):
        if subprocess.run(cmd + ' >/dev/null', shell=True).returncode != 0:
            LOGGER.info(f'\n{prefix} export requires Edge TPU compiler. Attempting install from {help_url}')
            sudo = subprocess.run('sudo --version >/dev/null', shell=True).returncode == 0  # sudo installed on system
-            for c in ['curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -',
-                      'echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | sudo tee /etc/apt/sources.list.d/coral-edgetpu.list',
-                      'sudo apt-get update',
-                      'sudo apt-get install edgetpu-compiler']:
+            for c in (
+                    'curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -',
+                    'echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | sudo tee /etc/apt/sources.list.d/coral-edgetpu.list',
+                    'sudo apt-get update', 'sudo apt-get install edgetpu-compiler'):
                subprocess.run(c if sudo else c.replace('sudo ', ''), shell=True, check=True)
        ver = subprocess.run(cmd, shell=True, capture_output=True, check=True).stdout.decode().split()[-1]

@@ -395,12 +408,10 @@ def export_tfjs(keras_model, im, file, prefix=colorstr('TensorFlow.js:')):
                r'{"outputs": {"Identity.?.?": {"name": "Identity.?.?"}, '
                r'"Identity.?.?": {"name": "Identity.?.?"}, '
                r'"Identity.?.?": {"name": "Identity.?.?"}, '
-                r'"Identity.?.?": {"name": "Identity.?.?"}}}',
-                r'{"outputs": {"Identity": {"name": "Identity"}, '
+                r'"Identity.?.?": {"name": "Identity.?.?"}}}', r'{"outputs": {"Identity": {"name": "Identity"}, '
                r'"Identity_1": {"name": "Identity_1"}, '
                r'"Identity_2": {"name": "Identity_2"}, '
-                r'"Identity_3": {"name": "Identity_3"}}}',
-                json)
+                r'"Identity_3": {"name": "Identity_3"}}}', json)
            j.write(subst)

        LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
@@ -410,7 +421,8 @@ def export_tfjs(keras_model, im, file, prefix=colorstr('TensorFlow.js:')):


@torch.no_grad()
-def run(data=ROOT / 'data/coco128.yaml',  # 'dataset.yaml path'
+def run(
+        data=ROOT / 'data/coco128.yaml',  # 'dataset.yaml path'
        weights=ROOT / 'yolov5s.pt',  # weights path
        imgsz=(640, 640),  # image (height, width)
        batch_size=1,  # batch size
@@ -431,8 +443,8 @@ def run(data=ROOT / 'data/coco128.yaml', # 'dataset.yaml path'
        topk_per_class=100,  # TF.js NMS: topk per class to keep
        topk_all=100,  # TF.js NMS: topk for all classes to keep
        iou_thres=0.45,  # TF.js NMS: IoU threshold
-        conf_thres=0.25  # TF.js NMS: confidence threshold
-        ):
+        conf_thres=0.25,  # TF.js NMS: confidence threshold
+):
    t = time.time()
    include = [x.lower() for x in include]  # to lowercase
    formats = tuple(export_formats()['Argument'][1:])  # --include arguments
@@ -495,9 +507,16 @@ def run(data=ROOT / 'data/coco128.yaml', # 'dataset.yaml path'
        if int8 or edgetpu:  # TFLite --int8 bug https://github.com/ultralytics/yolov5/issues/5707
            check_requirements(('flatbuffers==1.12',))  # required before `import tensorflow`
        assert not (tflite and tfjs), 'TFLite and TF.js models must be exported separately, please pass only one type.'
-        model, f[5] = export_saved_model(model.cpu(), im, file, dynamic, tf_nms=nms or agnostic_nms or tfjs,
-                                         agnostic_nms=agnostic_nms or tfjs, topk_per_class=topk_per_class,
-                                         topk_all=topk_all, conf_thres=conf_thres, iou_thres=iou_thres)  # keras model
+        model, f[5] = export_saved_model(model.cpu(),
+                                         im,
+                                         file,
+                                         dynamic,
+                                         tf_nms=nms or agnostic_nms or tfjs,
+                                         agnostic_nms=agnostic_nms or tfjs,
+                                         topk_per_class=topk_per_class,
+                                         topk_all=topk_all,
+                                         conf_thres=conf_thres,
+                                         iou_thres=iou_thres)  # keras model
        if pb or tfjs:  # pb prerequisite to tfjs
            f[6] = export_pb(model, im, file)
        if tflite or edgetpu:
@@ -542,7 +561,8 @@ def parse_opt():
    parser.add_argument('--topk-all', type=int, default=100, help='TF.js NMS: topk for all classes to keep')
    parser.add_argument('--iou-thres', type=float, default=0.45, help='TF.js NMS: IoU threshold')
    parser.add_argument('--conf-thres', type=float, default=0.25, help='TF.js NMS: confidence threshold')
-    parser.add_argument('--include', nargs='+',
+    parser.add_argument('--include',
+                        nargs='+',
                        default=['torchscript', 'onnx'],
                        help='torchscript, onnx, openvino, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs')
    opt = parser.parse_args()
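
For reference, a minimal usage sketch (illustration only, not part of this commit) of how the export_formats() table relates to the --include flow in run(); it assumes you are working from the yolov5 repository root so that export.py is importable:

from export import export_formats, run  # export.py sits at the repo root

fmts = export_formats()              # pandas DataFrame with columns Format, Argument, Suffix, GPU
valid = tuple(fmts['Argument'][1:])  # exportable --include arguments (the PyTorch '-' row is excluded)
include = ['torchscript', 'onnx']    # same as the argparse default shown above
assert all(x in valid for x in include), f'unsupported format in {include}, choose from {valid}'
run(weights='yolov5s.pt', include=include)  # sketch: should write yolov5s.torchscript and yolov5s.onnx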