This article collects typical usage examples of the Python function utils.cython_nms.nms. If you are wondering how the Python nms function is used, how to call it, or what real-world examples look like, the curated code examples below should help.
The following shows 15 code examples of the nms function, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
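Before the examples, here is a minimal sketch of the call convention they all share: dets is an N x 5 float32 array whose rows are (x1, y1, x2, y2, score), the second argument is the IoU overlap threshold, and the return value is the list of row indices to keep. The import path simply mirrors the one used in the examples, and the sample boxes are made up for illustration.

import numpy as np
from utils.cython_nms import nms  # same module path as in the examples below

# One detection per row as (x1, y1, x2, y2, score); the first two boxes overlap heavily
dets = np.array([[10, 10, 100, 100, 0.9],
                 [12, 12, 102, 102, 0.8],
                 [200, 200, 260, 260, 0.7]], dtype=np.float32)

keep = nms(dets, 0.3)  # e.g. [0, 2]: the lower-scoring near-duplicate is suppressed
dets = dets[keep, :]   # the examples usually follow this with a score threshold on dets[:, -1]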
Example 1: apply_nms
def apply_nms(all_boxes, thresh):
"""Apply non-maximum suppression to all predicted boxes output by the
test_net method.
"""
num_classes = len(all_boxes)
num_images = len(all_boxes[0])
nms_boxes = [[[] for _ in xrange(num_images)]
for _ in xrange(num_classes)]
for cls_ind in xrange(num_classes):
for im_ind in xrange(num_images):
dets = all_boxes[cls_ind][im_ind]
            if len(dets) == 0:  # works whether dets is an empty list or an empty array
continue
x1 = dets[:, 0]
y1 = dets[:, 1]
x2 = dets[:, 2]
y2 = dets[:, 3]
scores = dets[:, 4]
inds = np.where((x2 > x1) & (y2 > y1) & (scores > cfg.TEST.DET_THRESHOLD))[0]
dets = dets[inds,:]
            if len(dets) == 0:
continue
keep = nms(dets, thresh)
if len(keep) == 0:
continue
nms_boxes[cls_ind][im_ind] = dets[keep, :].copy()
return nms_boxes
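For context, both apply_nms examples (this one and Example 8) operate on the all_boxes structure described in Example 11 below, where all_boxes[cls][image] is an N x 5 array of (x1, y1, x2, y2, score). A typical, hypothetical invocation after testing would then look like:

# cfg.TEST.NMS is the Fast R-CNN config entry for the NMS IoU threshold (see Examples 6 and 11)
nms_boxes = apply_nms(all_boxes, cfg.TEST.NMS)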
Example 2: demoRest
def demoRest(net, image_name, classes, box_file, obj_proposals, im_file, im):
# Detect all object classes and regress object bounds
timer = Timer()
timer.tic()
scores, boxes = im_detect(net, im, obj_proposals)
timer.toc()
print ('Detection took {:.3f}s for '
'{:d} object proposals').format(timer.total_time, boxes.shape[0])
# Visualize detections for each class
CONF_THRESH = 0.8
NMS_THRESH = 0.3
for cls in classes:
cls_ind = CLASSES.index(cls)
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
cls_scores = scores[:, cls_ind]
keep = np.where(cls_scores >= CONF_THRESH)[0]
cls_boxes = cls_boxes[keep, :]
cls_scores = cls_scores[keep]
dets = np.hstack((cls_boxes,
cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
print 'All {} detections with p({} | box) >= {:.1f}'.format(cls, cls,
CONF_THRESH)
vis_detections(im, cls, dets, thresh=CONF_THRESH)
Example 3: demo
def demo(net, im, scale_factor, classes):
"""Detect object classes in an image using pre-computed object proposals."""
im2 = cv2.resize(im, (0,0), fx=1.0/scale_factor, fy=1.0/scale_factor)
obj_proposals_in = []
dlib.find_candidate_object_locations(im2, obj_proposals_in, min_size=70)
obj_proposals = np.empty((len(obj_proposals_in),4))
for idx in range(len(obj_proposals_in)):
obj_proposals[idx] = [obj_proposals_in[idx].left(), obj_proposals_in[idx].top(), obj_proposals_in[idx].right(), obj_proposals_in[idx].bottom()]
# Detect all object classes and regress object bounds
scores, boxes = im_detect(net, im2, obj_proposals)
# Visualize detections for each class
CONF_THRESH = 0.8
NMS_THRESH = 0.3
for cls in classes:
cls_ind = CLASSES.index(cls)
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
cls_scores = scores[:, cls_ind]
dets = np.hstack((cls_boxes,
cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
return [im2, cls, dets, CONF_THRESH]
Example 4: detect
def detect(self, img):
bbox = self.bbox(img)
scores, boxes = im_detect(self.net, img, bbox)
result = []
CONF_THRESH = 0.8
NMS_THRESH = 0.3
for cls in self.CLASSES[1:]:
cls_ind = self.CLASSES.index(cls)
cls_boxes = boxes[:, 4 * cls_ind:4 * (cls_ind + 1)]
cls_scores = scores[:, cls_ind]
dets = np.hstack((cls_boxes,
cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
inds = np.where(dets[:, -1] >= CONF_THRESH)[0]
if len(inds) == 0:
continue
for i in inds:
bbox = dets[i, :4]
x1, y1, x2, y2 = map(int, bbox)
result.append({
"label": cls,
"bbox": [x1, y1, x2, y2]
})
return result
Example 5: demo
def demo(net, image_name, classes):
"""Detect object classes in an image using pre-computed object proposals."""
    # Load pre-computed Selective Search object proposals
box_file = os.path.join(cfg.ROOT_DIR, 'data', 'demo',
image_name + '_boxes.mat')
obj_proposals = sio.loadmat(box_file)['boxes']
# Load the demo image
im_file = os.path.join(cfg.ROOT_DIR, 'data', 'demo', image_name + '.jpg')
im = cv2.imread(im_file)
# Detect all object classes and regress object bounds
timer = Timer()
timer.tic()
scores, boxes = im_detect(net, im, obj_proposals)
timer.toc()
print ('Detection took {:.3f}s for '
'{:d} object proposals').format(timer.total_time, boxes.shape[0])
# Visualize detections for each class
CONF_THRESH = 0.8
NMS_THRESH = 0.3
for cls in classes:
cls_ind = CLASSES.index(cls)
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
cls_scores = scores[:, cls_ind]
dets = np.hstack((cls_boxes,
cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
print 'All {} detections with p({} | box) >= {:.1f}'.format(cls, cls,
CONF_THRESH)
vis_detections(im, cls, dets, thresh=CONF_THRESH)
Example 6: detect_one
def detect_one(name, thresh=0.75):
im = cv2.imread(osp.join('/home/hezheqi/data/tmp/p2', name))
scores, polys = im_detect(sess, net, im)
print(scores)
boxes = np.zeros((polys.shape[0], 8), dtype=polys.dtype)
boxes[:, 0] = np.min(polys[:, 0:8:2], axis=1)
boxes[:, 1] = np.min(polys[:, 1:8:2], axis=1)
boxes[:, 2] = np.max(polys[:, 0:8:2], axis=1)
boxes[:, 3] = np.max(polys[:, 1:8:2], axis=1)
boxes[:, 4] = np.min(polys[:, 8::2], axis=1)
boxes[:, 5] = np.min(polys[:, 9::2], axis=1)
boxes[:, 6] = np.max(polys[:, 8::2], axis=1)
boxes[:, 7] = np.max(polys[:, 9::2], axis=1)
for j in range(1, NUM_CLASSES):
inds = np.where(scores[:, j] > thresh)[0]
cls_scores = scores[inds, j]
cls_boxes = boxes[inds, j * 4:(j + 1) * 4]
cls_polys = polys[inds, j * 8:(j + 1) * 8]
cls_dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])) \
.astype(np.float32, copy=False)
cls_dets_poly = cls_polys.astype(np.float32, copy=False)
keep = nms(cls_dets, cfg.TEST.NMS)
# cls_dets = cls_dets[keep, :]
cls_dets = cls_boxes[keep, :]
cls_dets_poly = cls_dets_poly[keep, :]
cls_scores = cls_scores[:, np.newaxis]
cls_scores = cls_scores[keep, :]
cls_dets = np.hstack((cls_dets, cls_dets_poly, cls_scores))
print(cls_dets)
vis_detections(im, cls_dets)
cv2.imwrite(osp.join(out_dir, name), im)
fout = open(osp.join(out_dir, 'txt', name[:-4]+'.txt'), 'w')
for det in cls_dets:
fout.write('{}\n'.format(' '.join(str(int(d)) for d in det[4:12])))
Example 7: recognize_img
def recognize_img(net, image_name, box_file, classes):
obj_proposals = sio.loadmat(box_file)['boxes']
# Load the demo image
im = cv2.imread(image_name)
# Detect all object classes and regress object bounds
scores, boxes = im_detect(net, im, obj_proposals)
#print type(boxes)
#dims = boxes.shape
#rows = dims[0]
#cols = dims[1]
# Visualize detections for each class
CONF_THRESH = 0.85
NMS_THRESH = 0.3
data_list = []
for cls in classes:
cls_ind = CLASSES.index(cls)
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
cls_scores = scores[:, cls_ind]
keep = np.where(cls_scores >= CONF_THRESH)[0]
cls_boxes = cls_boxes[keep, :]
cls_scores = cls_scores[keep]
dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
tmplist = get_detection_box(cls, dets, thresh=CONF_THRESH)
if len(tmplist) == 0:
continue
data_list.extend(tmplist)
data_list.sort(key=lambda obj:obj.get('xoffset'), reverse=False)
#data_list = char_roi_filter(data_list)
str = ''
for elem in data_list:
str = str + elem.get('char')
return str
Example 8: apply_nms
def apply_nms(all_boxes, thresh,intra_class_nms=False):
"""Apply non-maximum suppression to all predicted boxes output."""
num_classes = len(all_boxes)
num_images = len(all_boxes[0])
nms_boxes = [[[] for _ in xrange(num_images)]
for _ in xrange(num_classes)]
for im_ind in xrange(num_images):
for cls_ind in xrange(num_classes):
dets = all_boxes[cls_ind][im_ind]
if dets == []:
continue
if not 'keep_box_all_class' in vars():
dets_aug = dets
else:
dets_aug = np.row_stack((keep_box_all_class,dets))
keep = nms(dets_aug, thresh)
if len(keep) == 0:continue
if intra_class_nms:
keep_box_all_class = dets_aug[keep, :].copy()
else:
nms_boxes[cls_ind][im_ind] = dets_aug[keep, :].copy()
if intra_class_nms:
#run over all classes to match image with class
keep_set = set([tuple(x) for x in keep_box_all_class])
for cls_ind in xrange(num_classes):
class_set = set([tuple(x) for x in all_boxes[cls_ind][im_ind]])
nms_boxes[cls_ind][im_ind] = np.array([x for x in class_set & keep_set]).copy()
del keep_box_all_class
return nms_boxes
Example 9: demo
def demo(net, image_name, classes, ssdir, imgdir, normdir, savefile):
"""Detect object classes in an image using pre-computed object proposals."""
box_file = os.path.join(ssdir, image_name + '.mat')
obj_proposals = sio.loadmat(box_file)['boxes']
# Load the demo image
im_file = os.path.join(imgdir, image_name + '.jpg')
im = cv2.imread(im_file)
#print(np.shape(im))
# Load the demo image
norm_file = os.path.join(normdir, image_name + '.jpg')
norm_im = cv2.imread(norm_file)
    norm_im = cv2.resize(norm_im, (im.shape[1], im.shape[0]))  # cv2.resize takes (width, height)
im = (im, norm_im)
# Detect all object classes and regress object bounds
timer = Timer()
timer.tic()
scores, boxes = im_detect(net, im, obj_proposals)
timer.toc()
# print ('Detection took {:.3f}s for '
# '{:d} object proposals').format(timer.total_time, boxes.shape[0])
# Visualize detections for each class
CONF_THRESH = 0.8
NMS_THRESH = 0.3
thresh = 0.3
fid = open(savefile,'w')
cnt = 0
for cls in classes:
cnt = cnt + 1
cls_ind = CLASSES.index(cls)
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
cls_scores = scores[:, cls_ind]
dets = np.hstack((cls_boxes,
cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
inds = np.where(dets[:, -1] >= thresh)[0]
for i in inds:
bbox = dets[i, :4]
score = dets[i, -1]
fid.write('{0:d}'.format(cnt))
fid.write(' ')
fid.write('{0:.3f}'.format(score))
for j in range(4):
fid.write(' ')
fid.write('{0:.3f}'.format(bbox[j]))
fid.write('\n')
fid.close()
Example 10: demo
def demo(net, image_name, box_file, out_img, classes):
obj_proposals = sio.loadmat(box_file)['boxes']
# Load the demo image
im_file = image_name#os.path.join(cfg.ROOT_DIR, 'data', 'demo', image_name + '.jpg')
im = cv2.imread(im_file)
# Detect all object classes and regress object bounds
timer = Timer()
timer.tic()
scores, boxes = im_detect(net, im, obj_proposals)
#print type(boxes)
dims = boxes.shape
print dims
rows = dims[0]
cols = dims[1]
#for elem in boxes.flat:
# print elem
print '-===-=-==-==-=-====================--------'
timer.toc()
print ('Detection took {:.3f}s for '
'{:d} object proposals').format(timer.total_time, boxes.shape[0])
# Visualize detections for each class
CONF_THRESH = 0.85
NMS_THRESH = 0.3
img = im[:, :, (2, 1, 0)]
fig, ax = plt.subplots(figsize=(12, 12))
    ax.imshow(img, aspect='equal')  # show the RGB copy prepared above
data_list = [];
for cls in classes:
cls_ind = CLASSES.index(cls)
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
#print cls_boxes
#print '================='
cls_scores = scores[:, cls_ind]
keep = np.where(cls_scores >= CONF_THRESH)[0]
#print cls
cls_boxes = cls_boxes[keep, :]
cls_scores = cls_scores[keep]
dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
tmplist = vis_detections(ax, cls, dets, thresh=CONF_THRESH)
if len(tmplist) == 0:
continue
data_list.extend(tmplist)
#print data_list
#print '====================='
plt.savefig(out_img)
data_list.sort(key=lambda obj:obj.get('xoffset'), reverse=False)
str = ''
for elem in data_list:
str = str + elem.get('char')
return str
Example 11: test_net
def test_net(sess, net, imdb, weights_filename, max_per_image=100, thresh=0.05):
    """Test a Fast R-CNN network on an image database."""
    np.random.seed(cfg.RNG_SEED)
num_images = len(imdb.image_index)
# all detections are collected into:
# all_boxes[cls][image] = N x 5 array of detections in
# (x1, y1, x2, y2, score)
all_boxes = [[[] for _ in range(num_images)]
for _ in range(imdb.num_classes)]
output_dir = get_output_dir(imdb, weights_filename)
# timers
_t = {'im_detect' : Timer(), 'misc' : Timer()}
for i in range(num_images):
im = cv2.imread(imdb.image_path_at(i))
_t['im_detect'].tic()
scores, boxes = im_detect(sess, net, im)
_t['im_detect'].toc()
_t['misc'].tic()
# skip j = 0, because it's the background class
for j in range(1, imdb.num_classes):
inds = np.where(scores[:, j] > thresh)[0]
cls_scores = scores[inds, j]
cls_boxes = boxes[inds, j*4:(j+1)*4]
cls_dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])) \
.astype(np.float32, copy=False)
keep = nms(cls_dets, cfg.TEST.NMS)
cls_dets = cls_dets[keep, :]
all_boxes[j][i] = cls_dets
# Limit to max_per_image detections *over all classes*
if max_per_image > 0:
image_scores = np.hstack([all_boxes[j][i][:, -1]
for j in range(1, imdb.num_classes)])
if len(image_scores) > max_per_image:
image_thresh = np.sort(image_scores)[-max_per_image]
for j in range(1, imdb.num_classes):
keep = np.where(all_boxes[j][i][:, -1] >= image_thresh)[0]
all_boxes[j][i] = all_boxes[j][i][keep, :]
_t['misc'].toc()
print('im_detect: {:d}/{:d} {:.3f}s {:.3f}s' \
.format(i + 1, num_images, _t['im_detect'].average_time,
_t['misc'].average_time))
det_file = os.path.join(output_dir, 'detections.pkl')
with open(det_file, 'wb') as f:
pickle.dump(all_boxes, f, pickle.HIGHEST_PROTOCOL)
print('Evaluating detections')
imdb.evaluate_detections(all_boxes, output_dir)
Example 12: demo
def demo(net, image_name, classes):
"""Detect object classes in an image using pre-computed object proposals."""
    # Load pre-computed Selective Search object proposals
# box_file = os.path.join(cfg.ROOT_DIR, 'data', 'demo',image_name + '_boxes.mat')
test_mats_path = '/home/tanshen/fast-rcnn/data/kaggle/test_bbox'
box_file = os.path.join(test_mats_path ,image_name + '_boxes.mat')
obj_proposals = sio.loadmat(box_file)['boxes']
# Load the demo image
test_images_path = '/home/tanshen/fast-rcnn/data/kaggle/ImagesTest'
# im_file = os.path.join(cfg.ROOT_DIR, 'data', 'demo', image_name + '.jpg')
im_file = os.path.join(test_images_path, image_name + '.jpg')
im = cv2.imread(im_file)
# Detect all object classes and regress object bounds
timer = Timer()
timer.tic()
scores, boxes = im_detect(net, im, obj_proposals)
timer.toc()
# print ('Detection took {:.3f}s for '
# '{:d} object proposals').format(timer.total_time, boxes.shape[0])
# Visualize detections for each class
CONF_THRESH = 0
NMS_THRESH = 0.3
max_inds = 0
max_score = 0.0
for cls in classes:
cls_ind = CLASSES.index(cls)
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
cls_scores = scores[:, cls_ind]
keep = np.where(cls_scores >= CONF_THRESH)[0]
cls_boxes = cls_boxes[keep, :]
cls_scores = cls_scores[keep]
dets = np.hstack((cls_boxes,
cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
# print 'All {} detections with p({} | box) >= {:.1f} in {}'.format(cls, cls,
# CONF_THRESH, image_name)
#if get_max!=[]:
[ind,tmp]=get_max(im, cls, dets, thresh=CONF_THRESH)
#print image_name,cls,tmp
#vis_detections(im, cls, dets, image_name, thresh=CONF_THRESH)
#print dets[:,-1]
#print image_name,max_score
file.writelines([image_name,'\t',cls,'\t',str(tmp),'\n'])
if(max_score<tmp):
max_score=tmp
cls_max=cls
print image_name,cls_max,max_score
Example 13: runDetection
def runDetection(net, basePath, testFileName, classes):
ftest = open(testFileName,'r')
imageFileName = basePath+'/' + ftest.readline().strip()
num = 1
outputFile = open('CarDetectionResult_window_30000.txt','w')
while imageFileName:
print imageFileName
print 'now is ',num
num += 1
imageFileBaseName = os.path.basename(imageFileName)
imageFileDir = os.path.dirname(imageFileName)
boxFileName = imageFileDir +'/'+imageFileBaseName.replace('.jpg','_boxes.mat')
print boxFileName
obj_proposals = sio.loadmat(boxFileName)['boxes']
#obj_proposals[:,2] = obj_proposals[:, 2] + obj_proposals[:, 0]
#obj_proposals[:,3] = obj_proposals[:, 3] + obj_proposals[:, 1]
im = cv2.imread(imageFileName)
timer = Timer()
timer.tic()
scores, boxes = im_detect(net, im, obj_proposals)
timer.toc()
        print ('Detection took {:.3f}s for '
'{:d} object proposals').format(timer.total_time, boxes.shape[0])
CONF_THRESH = 0.8
NMS_THRESH = 0.3
for cls in classes:
cls_ind = CLASSES.index(cls)
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
cls_scores = scores[:, cls_ind]
dets = np.hstack((cls_boxes,
cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
print 'All {} detections with p({} | box) >= {:.1f}'.format(cls, cls,
CONF_THRESH)
inds = np.where(dets[:, -1] >= CONF_THRESH)[0]
print 'Detected car number ', inds.size
if len(inds) != 0:
outputFile.write(imageFileName+' ')
outputFile.write(str(inds.size)+' ')
for i in inds:
bbox = dets[i, :4]
outputFile.write(str(int(bbox[0]))+' '+ str(int(bbox[1]))+' '+ str(int(bbox[2]))+' '+ str(int(bbox[3]))+' ')
outputFile.write('\n')
else:
outputFile.write(imageFileName +' 0' '\n')
temp = ftest.readline().strip()
if temp:
imageFileName = basePath+'/' + temp
else:
break
Example 14: detect
def detect(self, image_name, mode, mixed=True):
# DJDJ
# Load the demo image
#im_file = os.path.join(cfg.ROOT_DIR, 'data', 'demo', image_name)
#im = cv2.imread(im_file)
im = cv2.imread(image_name)
# Detect all object classes and regress object bounds
for i in range(1):
timer = Timer()
timer.tic()
if mixed:
scores, boxes = im_detect_mixed(self.net, im)
else:
scores, boxes = im_detect(self.net, im, obj_proposals)
timer.toc()
print ('Detection took {:.3f}s for '
'{:d} object proposals').format(timer.total_time, boxes.shape[0])
# Visualize detections for each class
CONF_THRESH = 0.8
NMS_THRESH = 0.3
timer = Timer()
result = {}
if mode == '3': # Car mode
classes = CLASSES_CAR
else:
classes = CLASSES
for cls in CLASSES:
if mode == '3' and (cls in CLASSES_CAR) == False: # Car mode
continue
cls_ind = CLASSES.index(cls)
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
cls_scores = scores[:, cls_ind]
dets = np.hstack((cls_boxes,
cls_scores[:, np.newaxis])).astype(np.float32)
timer.tic()
keep = nms(dets, NMS_THRESH)
timer.toc()
dets = dets[keep, :]
result[cls_ind] = dets
#print 'All {} detections with p({} | box) >= {:.1f}'.format(cls, cls, CONF_THRESH)
#vis_detections(im, cls, dets, thresh=CONF_THRESH)
#print ('nms took {:.3f}s').format(timer.total_time)
return result
Example 15: recognize_checkcode_img
def recognize_checkcode_img(net, image_name, classes):
boxes = get_selective_search_boxes(image_name)
if boxes is None:
dict = {}
dict['ccvalue'] = ''
dict['rects'] = []
dict['code'] = 1
return dict
#im = cv2.imread(image_name)
im = load_image(image_name)
#print im
#print type(im)
#print im.shape
#cv2.imwrite('asasdf.jpg', im)
scores, boxes = im_detect(net, im, boxes)
CONF_THRESH = 0.5
NMS_THRESH = 0.1
data_list = []
for cls in classes:
cls_ind = CLASSES.index(cls)
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
cls_scores = scores[:, cls_ind]
keep = np.where(cls_scores >= CONF_THRESH)[0]
cls_boxes = cls_boxes[keep, :]
cls_scores = cls_scores[keep]
dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
tmplist = get_detection_box(cls, dets, thresh=CONF_THRESH)
if len(tmplist) == 0:
continue
data_list.extend(tmplist)
data_list.sort(key=lambda obj:obj.get('xoffset'), reverse=False)
#
#print data_list
#print len(data_list)
#print '-=-=-=-=-=-=-=-='
data_list = rect_filter(data_list, 0.85)
#print len(data_list)
#print '-=-=-=-=-=-=-=-='
data_list = char_roi_filter(data_list)
#print len(res_list)
#print '-=-=-=-=-=-=-=-='
str = ''
for elem in data_list:
str = str + elem.get('char')
#print res_list
dict = {}
dict['ccvalue'] = str
dict['rects'] = data_list
dict['code'] = 0
#print dict
return dict