- from quantization import *
- import os, argparse, math, torch
- import numpy as np
- from yolo import non_max_suppression
- PROJECT_NAME = os.environ['PROJECT_NAME']
- qk_file = PROJECT_NAME+'/model_quantization/checkpoint_quan.qk'
- # qk_file = '/root/eeasy/eeasy_quan/yolov8toezb_ygy/model_quantization/checkpoint_quan.qk'
- def make_anchor(input_shape=(640, 640), grid_cell_offset=0.5):
- anchor_points = []
- for i in [8, 16, 32]:
- h, w = input_shape[0] // i, input_shape[1] // i
- sx = np.arange(w) + grid_cell_offset
- sy = np.arange(h) + grid_cell_offset
- sy, sx = np.meshgrid(sy, sx)
- anchor_points.append(np.stack((sy, sx), -1).reshape((-1, 2)))
- return np.transpose(np.concatenate(anchor_points), axes=[1, 0])
- def yololayer(res):
- strides = np.load('py/tensor.npy', allow_pickle=True)
- anchor_points = make_anchor()
- x1y1 = anchor_points - res[:, :2]
- x2y2 = anchor_points + res[:, 2:4]
- c_xy = (x1y1 + x2y2) / 2
- wh = x2y2 - x1y1
- res[:, :4] = np.concatenate((c_xy, wh), axis=1) * strides
- # res[:, :4] = np.concatenate((x1y1, x2y2), axis=1) * strides
- return res
- if __name__ == '__main__':
- shapes = Helper.get_caffe_output_shapes(qk_file)
- # bin_path = ['model_pet/_share_res_hw__model.22_Concat.bin', 'model_pet/_share_res_hw__model.22_Concat_1.bin', 'model_pet/_share_res_hw__model.22_Concat_2.bin']
- bin_path = ['model_pet/sim/res_hw/_model.22_Concat.bin', 'model_pet/sim/res_hw/_model.22_Concat_1.bin', 'model_pet/sim/res_hw/_model.22_Concat_2.bin']
- name = ['/model.22/Concat', '/model.22/Concat_1', '/model.22/Concat_2']
- sim_res1 = np.fromfile(bin_path[0], dtype=np.int8 if Helper.get_quantize_out_bw(qk_file, name[0]) == 8 else np.int16) # /model.0/conv/Conv
- bin_res1 = Helper.hw_data_to_caffe_int_data(sim_res1, shapes[name[0]])
-
- sim_res2 = np.fromfile(bin_path[1], dtype=np.int8 if Helper.get_quantize_out_bw(qk_file, name[1]) == 8 else np.int16) # /model.0/conv/Conv
- bin_res2 = Helper.hw_data_to_caffe_int_data(sim_res2, shapes[name[1]])
-
- sim_res3 = np.fromfile(bin_path[2], dtype=np.int8 if Helper.get_quantize_out_bw(qk_file, name[2]) == 8 else np.int16) # /model.0/conv/Conv
- bin_res3 = Helper.hw_data_to_caffe_int_data(sim_res3, shapes[name[2]])
-
- bin_res1, bin_res2, bin_res3 = bin_res1 / math.pow(2, 3), bin_res2 / math.pow(2, 3), bin_res3 / math.pow(2, 3)
-
- # # ( 1, c, w, h)
- # '/model.22/Concat': (1, 6, 80, 80)
-
- res_python = []
- for data in [bin_res1, bin_res2, bin_res3]:
- for i in range(data.shape[2]):
- for j in range(data.shape[3]):
- res_python.append(data[0, :, i, j])
- res_python = np.stack(res_python) # 8400,6
- res_python = np.transpose(res_python, axes=[1, 0]) # 6,8400
- res_python = res_python[None] # 1,6,8400
- res_python = yololayer(res_python) # 1,6,8400
- res_python = non_max_suppression(torch.from_numpy(res_python))[0]
- print(res_python.shape)
- print(res_python)
- # res_python = np.transpose(res_python[0], axes=[1, 0]) # 8400,6
-
- # res_c = []
- # for data in [sim_res1, sim_res2, sim_res3]:
- # for i in range(0, data.shape[0], 16):
- # res_c.append(data[i:i+6])
- # res_c = np.stack(res_c) # 8400,6
-
- with open('out.txt') as f:
- data = np.array(list(map(lambda x:np.array(x.strip().split(), dtype=np.float), f.readlines())))
- print(data.shape, res_python.shape)
-
- # print(np.sum(np.abs(res_c - res_python)))
- # print(np.sum(np.abs(res_python - data)))
- # random_index = np.arange(8400)
- # # np.random.shuffle(random_index)
- # for i in random_index[:100]:
- # print(i, res_python[i], data[i])