export-rknn.py
3.0 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
#!/usr/bin/env python3
# Copyright (c) 2025 Xiaomi Corporation (authors: Fangjun Kuang)
import argparse
import logging
from pathlib import Path
from rknn.api import RKNN
# Keep toolkit output quiet; only warnings and errors are shown.
logging.basicConfig(level=logging.WARNING)

# Rockchip SoC targets this script can export RKNN models for.
# The commented-out entries are platforms the RKNN toolkit recognizes
# but that are not enabled here.
g_platforms = [
    # "rv1103",
    # "rv1103b",
    # "rv1106",
    # "rk2118",
    "rk3562",
    "rk3566",
    "rk3568",
    "rk3576",
    "rk3588",
]
def get_parser():
    """Build the command-line parser for the ONNX-to-RKNN export script.

    Returns:
      An ``argparse.ArgumentParser`` with three required arguments:
      ``--target-platform``, ``--in-model``, and ``--out-model``.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )

    parser.add_argument(
        "--target-platform",
        type=str,
        required=True,
        # Enforce what the help text promises: reject unsupported SoCs
        # early with a clear argparse error instead of failing later
        # inside the RKNN toolkit.
        choices=g_platforms,
        help=f"Supported values are: {','.join(g_platforms)}",
    )

    parser.add_argument(
        "--in-model",
        type=str,
        required=True,
        help="Path to the input onnx model",
    )

    parser.add_argument(
        "--out-model",
        type=str,
        required=True,
        help="Path to the output rknn model",
    )

    return parser
def get_meta_data(model: str) -> str:
    """Read an ONNX model's custom metadata and serialize it as ``k=v;k=v``.

    Also prints the model's input/output specs so the user can
    sanity-check the graph before conversion.

    Args:
      model: Path to the input ONNX model file.

    Returns:
      The model's custom metadata serialized as a ``;``-separated
      ``key=value`` string (empty string if there is no metadata).

    Raises:
      ValueError: If the serialized string does not fit in RKNN's
        ``custom_string`` field (must be shorter than 1024 characters).
    """
    import onnxruntime

    session_opts = onnxruntime.SessionOptions()
    session_opts.inter_op_num_threads = 1
    session_opts.intra_op_num_threads = 1

    m = onnxruntime.InferenceSession(
        model,
        sess_options=session_opts,
        providers=["CPUExecutionProvider"],
    )

    for i in m.get_inputs():
        print(i)

    print("-----")

    for i in m.get_outputs():
        print(i)
    print()

    meta = m.get_modelmeta().custom_metadata_map

    # str.join instead of repeated concatenation: one pass, no
    # quadratic string rebuilding.
    s = ";".join(f"{key}={value}" for key, value in meta.items())

    # Raise instead of `assert`: asserts are stripped under `python -O`,
    # and an oversized custom_string should always be a hard error.
    if len(s) >= 1024:
        raise ValueError(
            f"Serialized metadata is too long for RKNN custom_string: "
            f"{len(s)} >= 1024"
        )

    return s
def export_rknn(rknn, filename):
    """Export a built RKNN model to ``filename``; exit the process on failure.

    Args:
      rknn: A built RKNN object (as returned by ``init_model``).
      filename: Destination path for the ``.rknn`` file.
    """
    ret = rknn.export_rknn(filename)
    if ret != 0:
        # Include the actual filename in the message (the previous text
        # had a garbled placeholder). exit() with a string prints it to
        # stderr and exits with status 1.
        exit(f"Export rknn model to {filename} failed!")
def init_model(filename: str, target_platform: str, custom_string=None):
    """Load an ONNX model and build it into an RKNN model in memory.

    Args:
      filename: Path to the input ONNX model.
      target_platform: Target SoC, e.g. ``rk3588`` (see ``g_platforms``).
      custom_string: Optional metadata string embedded into the RKNN
        model (retrievable at runtime on the device).

    Returns:
      The built RKNN object, ready for ``export_rknn``.

    Exits the process with an error message on any toolkit failure.
    """
    rknn = RKNN(verbose=False)
    rknn.config(
        optimization_level=0,
        target_platform=target_platform,
        custom_string=custom_string,
    )

    if not Path(filename).is_file():
        # The f-strings below previously had garbled placeholders;
        # they now report the offending filename.
        exit(f"{filename} does not exist")

    ret = rknn.load_onnx(model=filename)
    if ret != 0:
        exit(f"Load model {filename} failed!")

    ret = rknn.build(do_quantization=False)
    if ret != 0:
        exit(f"Build model {filename} failed!")

    return rknn
class RKNNModel:
    """Thin wrapper tying together metadata extraction, RKNN build, and export."""

    def __init__(self, model: str, target_platform: str):
        """Build an RKNN model from the given ONNX file.

        The ONNX model's custom metadata is copied into the RKNN
        model's ``custom_string`` field.
        """
        metadata = get_meta_data(model)
        print(metadata)
        self.model = init_model(
            model, target_platform=target_platform, custom_string=metadata
        )

    def export_rknn(self, model):
        """Write the built RKNN model to the path ``model``."""
        export_rknn(self.model, model)

    def release(self):
        """Release resources held by the underlying RKNN object."""
        self.model.release()
def main():
    """Entry point: parse CLI arguments and convert ONNX -> RKNN."""
    args = get_parser().parse_args()
    print(vars(args))

    rknn_model = RKNNModel(
        model=args.in_model,
        target_platform=args.target_platform,
    )
    rknn_model.export_rknn(model=args.out_model)
    rknn_model.release()


if __name__ == "__main__":
    main()