diff --git a/examples/rknn_convert/models/tensorflow/mobilenet-ssd/model_config.yml b/examples/rknn_convert/models/tensorflow/mobilenet-ssd/model_config.yml
index 57d0465..162dbcc 100644
--- a/examples/rknn_convert/models/tensorflow/mobilenet-ssd/model_config.yml
+++ b/examples/rknn_convert/models/tensorflow/mobilenet-ssd/model_config.yml
@@ -6,8 +6,8 @@ models:
     subgraphs:
       inputs:
        - FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_0/BatchNorm/batchnorm/mul_1
-      input-size-list:
-        - 300,300,3
+      input_tensor_shapes:
+        - [300, 300, 3]
       outputs:
         - concat
         - concat_1
diff --git a/examples/rknn_convert/rknn_convert.py b/examples/rknn_convert/rknn_convert.py
index 2b66200..5a1ac61 100644
--- a/examples/rknn_convert/rknn_convert.py
+++ b/examples/rknn_convert/rknn_convert.py
@@ -1,89 +1,95 @@
 #!/usr/bin/env python3
-import os
 import sys
-
-#import yaml
+import os
+import argparse
 import ruamel.yaml
 from rknn.api import RKNN
 
-yaml = ruamel.yaml.YAML(typ='rt')
 
-def parse_model_config(yaml_config_file):
-    with open(yaml_config_file) as f:
-        yaml_config = f.read()
-    model_configs = yaml.load(yaml_config)
-    return model_configs
+def parse_model_config(config_file):
+    config_text = ""
+    with open(config_file) as f:
+        config_text = f.read()
+    if config_text:
+        yaml = ruamel.yaml.YAML(typ='rt')
+        return yaml.load(config_text)
 
 
-def convert_model(model_path, out_path, pre_compile):
-    if os.path.isfile(model_path):
-        yaml_config_file = model_path
-        model_path = os.path.dirname(yaml_config_file)
-    else:
-        yaml_config_file = os.path.join(model_path, 'model_config.yml')
-    if not os.path.exists(yaml_config_file):
-        print('model config {} not exist!'.format(yaml_config_file))
-        exit(-1)
+def convert_model(config_file, out_path, pre_compile):
 
-    model_configs = parse_model_config(yaml_config_file)
+    config_path = os.path.dirname(config_file)
 
-    exported_rknn_model_path_list = []
+    exported_rknn_model_paths = []
+    config = parse_model_config(config_file)
+    if config is None:
+        print('Invalid configuration.')
+        return exported_rknn_model_paths
 
-    for model_name in model_configs['models']:
-        model = model_configs['models'][model_name]
+    for model_name in config['models']:
+        model = config['models'][model_name]
 
         rknn = RKNN()
 
         rknn.config(**model['configs'])
 
-        print('--> Loading model...')
+        print('--> Load model...')
+        # caffe entries define prototxt/caffemodel paths instead of model_file_path
+        if 'model_file_path' in model:
+            model_file_path = os.path.join(config_path, model['model_file_path'])
         if model['platform'] == 'tensorflow':
-            model_file_path = os.path.join(model_path, model['model_file_path'])
-            input_size_list = []
-            for input_size_str in model['subgraphs']['input-size-list']:
-                input_size = list(map(int, input_size_str.split(',')))
-                input_size_list.append(input_size)
-                pass
+            subgraphs = model['subgraphs']
             rknn.load_tensorflow(tf_pb=model_file_path,
-                                 inputs=model['subgraphs']['inputs'],
-                                 outputs=model['subgraphs']['outputs'],
-                                 input_size_list=input_size_list)
+                                 inputs=subgraphs['inputs'],
+                                 outputs=subgraphs['outputs'],
+                                 input_size_list=subgraphs['input_tensor_shapes'])
         elif model['platform'] == 'tflite':
-            model_file_path = os.path.join(model_path, model['model_file_path'])
             rknn.load_tflite(model=model_file_path)
-        elif model['platform'] == 'caffe':
-            prototxt_file_path = os.path.join(model_path,model['prototxt_file_path'])
-            caffemodel_file_path = os.path.join(model_path,model['caffemodel_file_path'])
-            rknn.load_caffe(model=prototxt_file_path, proto='caffe', blobs=caffemodel_file_path)
         elif model['platform'] == 'onnx':
-            model_file_path = os.path.join(model_path, model['model_file_path'])
             rknn.load_onnx(model=model_file_path)
+        elif model['platform'] == 'caffe':
+            prototxt_file_path = os.path.join(config_path, model['prototxt_file_path'])
+            caffemodel_file_path = os.path.join(config_path, model['caffemodel_file_path'])
+            rknn.load_caffe(model=prototxt_file_path, proto='caffe', blobs=caffemodel_file_path)
         else:
-            print("platform %s not support!" % (model['platform']))
-        print('done')
+            print("Platform {:} is not supported! Moving on.".format(model['platform']))
+            continue
+        print('Done')
 
         if model['quantize']:
-            dataset_path = os.path.join(model_path, model['dataset'])
+            dataset_path = os.path.join(config_path, model['dataset'])
         else:
             dataset_path = './dataset'
 
         print('--> Build RKNN model...')
         rknn.build(do_quantization=model['quantize'], dataset=dataset_path, pre_compile=pre_compile)
-        print('done')
+        print('Done')
 
-        export_rknn_model_path = "%s.rknn" % (os.path.join(out_path, model_name))
-        print('--> Export RKNN model to: {}'.format(export_rknn_model_path))
+        export_rknn_model_path = "{:}.rknn".format(os.path.join(out_path, model_name))
+        print('--> Export RKNN model to: {:}'.format(export_rknn_model_path))
         rknn.export_rknn(export_path=export_rknn_model_path)
-        exported_rknn_model_path_list.append(export_rknn_model_path)
-        print('done')
+        exported_rknn_model_paths.append(export_rknn_model_path)
+        print('Done')
+
+    return exported_rknn_model_paths
+
 
-    return exported_rknn_model_path_list
+def parse_args(*argv):
+    parser = argparse.ArgumentParser(description="Build RKNN models")
+    parser.add_argument("-c", "--config", required=True)
+    parser.add_argument("-o", "--out_dir", required=True)
+    parser.add_argument("-p", "--precompile", action="store_true")
+    args = parser.parse_args(argv)
+
+    if not os.path.isfile(args.config):
+        print("Enter an existing config file.")
+        sys.exit(-1)
+    return args.config, args.out_dir, args.precompile
 
 
 if __name__ == '__main__':
-    model_path = sys.argv[1]
-    out_path = sys.argv[2]
-    pre_compile = sys.argv[3] in ['true', '1', 'True']
+    config_file, out_path, pre_compile = parse_args(*sys.argv[1:])
+    #print(config_file, out_path, pre_compile)
+
+    if out_path:
+        os.makedirs(out_path, exist_ok=True)
 
-    convert_model(model_path, out_path, pre_compile)
+    convert_model(config_file, out_path, pre_compile)
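
Usage sketch (not part of the patch): with the argparse front end added above, the script is invoked as "python3 rknn_convert.py -c <model_config.yml> -o <out_dir> [-p]". The snippet below drives the same conversion programmatically through convert_model(); it assumes rknn_convert.py is importable from the working directory, and the config path and output directory are illustrative examples, not files introduced by this change.

    # Hypothetical programmatic use of the reworked converter (paths are examples only).
    from rknn_convert import convert_model

    rknn_paths = convert_model(
        config_file='models/tensorflow/mobilenet-ssd/model_config.yml',  # example config from this repo
        out_path='out',        # convert_model() does not create this directory; the __main__ block does
        pre_compile=False,     # forwarded to rknn.build(pre_compile=...)
    )
    print(rknn_paths)          # one '<out_dir>/<model_name>.rknn' path per converted model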