# forked from tylin/coco-caption
# coco_eval_example.py (25 lines, 19 loc, 847 bytes)
from pycocotools.coco import COCO
from pycocoevalcap.eval import COCOEvalCap
def main() -> None:
    """Score a COCO caption results file against the val2014 ground truth.

    Loads the ground-truth annotations and a results file, evaluates the
    results with every metric COCOEvalCap provides (BLEU, METEOR, ROUGE-L,
    CIDEr, SPICE — per the pycocoevalcap package), and prints one
    ``metric: score`` line per metric.

    Requires ``captions_val2014.json`` and
    ``captions_val2014_fakecap_results.json`` in the working directory.
    """
    annotation_file = 'captions_val2014.json'
    results_file = 'captions_val2014_fakecap_results.json'

    # Create the ground-truth COCO object and the results object.
    coco = COCO(annotation_file)
    coco_result = coco.loadRes(results_file)

    # Build the evaluator from ground truth + results.
    coco_eval = COCOEvalCap(coco, coco_result)

    # Restrict evaluation to the images that appear in the results file.
    # Remove this line to evaluate against the full validation set instead.
    coco_eval.params['image_id'] = coco_result.getImgIds()

    # Run all metrics. NOTE: SPICE takes a few minutes the first time,
    # but later runs are faster thanks to its cache.
    coco_eval.evaluate()

    # Print the aggregate score for each metric.
    for metric, score in coco_eval.eval.items():
        print(f'{metric}: {score:.3f}')


# Guard the entry point so importing this module does not trigger the
# (slow, file-reading) evaluation as a side effect.
if __name__ == '__main__':
    main()