import platform

import cv2
import numpy as np
from rknnlite.api import RKNNLite


# device tree node used to tell rk356x and rk3588 boards apart
DEVICE_COMPATIBLE_NODE = '/proc/device-tree/compatible'


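# get_host() reads the device-tree 'compatible' string to distinguish RK3588
# boards from RK356x ones; on other platforms it just returns '<system>-<machine>'.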
def get_host():
    # get platform and device type
    system = platform.system()
    machine = platform.machine()
    os_machine = system + '-' + machine
    if os_machine == 'Linux-aarch64':
        try:
            with open(DEVICE_COMPATIBLE_NODE) as f:
                device_compatible_str = f.read()
                if 'rk3588' in device_compatible_str:
                    host = 'RK3588'
                else:
                    host = 'RK356x'
        except IOError:
            print('Read device node {} failed.'.format(DEVICE_COMPATIBLE_NODE))
            exit(-1)
    else:
        host = os_machine
    return host


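# ResNet18 models converted separately for the RK356x and RK3588 NPUs;
# the network takes a 224x224 RGB input (INPUT_SIZE).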
INPUT_SIZE = 224

RK356X_RKNN_MODEL = 'resnet18_for_rk356x.rknn'
RK3588_RKNN_MODEL = 'resnet18_for_rk3588.rknn'


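# show_top5 applies a softmax to the flattened model output and prints the
# five highest-scoring class indices with their probabilities.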
def show_top5(result):
    output = result[0].reshape(-1)
    # softmax over the raw logits so the scores sum to 1
    output = np.exp(output) / sum(np.exp(output))
    output_sorted = sorted(output, reverse=True)
    top5_str = 'resnet18\n-----TOP 5-----\n'
    for i in range(5):
        value = output_sorted[i]
        # look up every class index that has this score (ties included)
        index = np.where(output == value)[0]
        for j in range(len(index)):
            if (i + j) >= 5:
                break
            if value > 0:
                topi = '{}: {}\n'.format(index[j], value)
            else:
                topi = '-1: 0.0\n'
            top5_str += topi
    print(top5_str)


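# Demo entry point: pick the model for the detected SoC, load it, run a single
# image through the NPU and print the top-5 classification.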
if __name__ == '__main__':

    host_name = get_host()
    if host_name == 'RK356x':
        rknn_model = RK356X_RKNN_MODEL
    elif host_name == 'RK3588':
        rknn_model = RK3588_RKNN_MODEL
    else:
        print("This demo cannot run on the current platform: {}".format(host_name))
        exit(-1)

    rknn_lite = RKNNLite()

    # load RKNN model
    print('--> Load RKNN model')
    ret = rknn_lite.load_rknn(rknn_model)
    if ret != 0:
        print('Load RKNN model failed')
        exit(ret)
    print('done')

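    # cv2.imread returns the image in BGR order; swap channels to match the
    # RGB input the model expects.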
    ori_img = cv2.imread('./space_shuttle_224.jpg')
    img = cv2.cvtColor(ori_img, cv2.COLOR_BGR2RGB)

    # init runtime environment
    print('--> Init runtime environment')
    # When running directly on an RK356x/RK3588 board with Debian OS,
    # there is no need to specify a target device.
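    # RK3588 has three NPU cores; core_mask pins this session to core 0.
    # Other masks (e.g. RKNNLite.NPU_CORE_0_1_2 or NPU_CORE_AUTO) should also
    # be available in recent rknn-toolkit-lite2 releases.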
    if host_name == 'RK3588':
        ret = rknn_lite.init_runtime(core_mask=RKNNLite.NPU_CORE_0)
    else:
        ret = rknn_lite.init_runtime()
    if ret != 0:
        print('Init runtime environment failed')
        exit(ret)
    print('done')

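    # inference() takes a list of numpy arrays (one per model input) and
    # returns a list of output arrays.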
    # Inference
    print('--> Running model')
    outputs = rknn_lite.inference(inputs=[img])
    show_top5(outputs)
    print('done')

    rknn_lite.release()