[Datumaro] Update COCO tests #1669

Merged 1 commit on Jun 10, 2020
189 changes: 44 additions & 145 deletions datumaro/tests/test_coco_format.py
@@ -1,6 +1,4 @@
import json
import numpy as np
import os
import os.path as osp

from unittest import TestCase
@@ -19,94 +17,13 @@
CocoLabelsConverter,
)
from datumaro.plugins.coco_format.importer import CocoImporter
from datumaro.util.image import save_image, Image
from datumaro.util.image import Image
from datumaro.util.test_utils import TestDir, compare_datasets


class CocoImporterTest(TestCase):
@staticmethod
def generate_annotation():
annotation = {
'licenses': [],
'info': {},
'categories': [],
'images': [],
'annotations': [],
}
annotation['licenses'].append({
'name': '',
'id': 0,
'url': '',
})
annotation['info'] = {
'contributor': '',
'date_created': '',
'description': '',
'url': '',
'version': '',
'year': '',
}
annotation['licenses'].append({
'name': '',
'id': 0,
'url': '',
})
annotation['categories'].append({
'id': 1,
'name': 'TEST',
'supercategory': '',
})
annotation['images'].append({
"id": 1,
"width": 5,
"height": 10,
"file_name": '000000000001.jpg',
"license": 0,
"flickr_url": '',
"coco_url": '',
"date_captured": 0,
})
annotation['annotations'].append({
"id": 1,
"image_id": 1,
"category_id": 1,
"segmentation": [[0, 0, 1, 0, 1, 2, 0, 2]],
"area": 2,
"bbox": [0, 0, 1, 2],
"iscrowd": 0,
})
annotation['annotations'].append({
"id": 2,
"image_id": 1,
"category_id": 1,
"segmentation": {
"counts": [
0, 10,
5, 5,
5, 5,
0, 10,
10, 0],
"size": [10, 5]},
"area": 30,
"bbox": [0, 0, 10, 4],
"iscrowd": 1,
})
return annotation

def COCO_dataset_generate(self, path):
img_dir = osp.join(path, 'images', 'val')
ann_dir = osp.join(path, 'annotations')
os.makedirs(img_dir)
os.makedirs(ann_dir)

image = np.ones((10, 5, 3))
save_image(osp.join(img_dir, '000000000001.jpg'), image)

annotation = self.generate_annotation()

with open(osp.join(ann_dir, 'instances_val.json'), 'w') as outfile:
json.dump(annotation, outfile)
DUMMY_DATASET_DIR = osp.join(osp.dirname(__file__), 'assets', 'coco_dataset')

class CocoImporterTest(TestCase):
def test_can_import(self):
class DstExtractor(Extractor):
def __iter__(self):
@@ -129,18 +46,13 @@ def categories(self):
label_cat.add('TEST')
return { AnnotationType.label: label_cat }

with TestDir() as test_dir:
self.COCO_dataset_generate(test_dir)

dataset = Project.import_from(test_dir, 'coco').make_dataset()
dataset = Project.import_from(DUMMY_DATASET_DIR, 'coco') \
.make_dataset()

compare_datasets(self, DstExtractor(), dataset)
compare_datasets(self, DstExtractor(), dataset)

def test_can_detect(self):
with TestDir() as test_dir:
self.COCO_dataset_generate(test_dir)

self.assertTrue(CocoImporter.detect(test_dir))
self.assertTrue(CocoImporter.detect(DUMMY_DATASET_DIR))

class CocoConverterTest(TestCase):
def _test_save_and_load(self, source_dataset, converter, test_dir,
@@ -282,7 +194,7 @@ def test_can_merge_polygons_on_loading(self):
label_categories.add(str(i))
categories = { AnnotationType.label: label_categories }

class TestExtractor(Extractor):
class SrcExtractor(Extractor):
def __iter__(self):
return iter([
DatasetItem(id=1, image=np.zeros((6, 10, 3)),
@@ -298,30 +210,35 @@ def __iter__(self):
def categories(self):
return categories

class TargetExtractor(TestExtractor):
class DstExtractor(Extractor):
def __iter__(self):
items = list(super().__iter__())
items[0]._annotations = [
Mask(np.array([
[0, 1, 1, 1, 0, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
# only internal fragment (without the border),
# but not everywhere...
),
label=3, id=4, group=4,
attributes={ 'is_crowd': False }),
]
return iter(items)
return iter([
DatasetItem(id=1, image=np.zeros((6, 10, 3)),
annotations=[
Mask(np.array([
[0, 1, 1, 1, 0, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
# only internal fragment (without the border),
# but not everywhere...
),
label=3, id=4, group=4,
attributes={ 'is_crowd': False }),
]
),
])

def categories(self):
return categories

with TestDir() as test_dir:
self._test_save_and_load(TestExtractor(),
self._test_save_and_load(SrcExtractor(),
CocoInstancesConverter(), test_dir,
importer_args={'merge_instance_polygons': True},
target_dataset=TargetExtractor())
target_dataset=DstExtractor())

def test_can_crop_covered_segments(self):
label_categories = LabelCategories()
@@ -435,7 +352,7 @@ def test_can_convert_masks_to_polygons(self):
for i in range(10):
label_categories.add(str(i))

class SrcTestExtractor(Extractor):
class SrcExtractor(Extractor):
def __iter__(self):
return iter([
DatasetItem(id=1, image=np.zeros((5, 10, 3)),
@@ -455,7 +372,7 @@ def __iter__(self):
def categories(self):
return { AnnotationType.label: label_categories }

class DstTestExtractor(Extractor):
class DstExtractor(Extractor):
def __iter__(self):
return iter([
DatasetItem(id=1, image=np.zeros((5, 10, 3)),
@@ -476,9 +393,9 @@ def categories(self):
return { AnnotationType.label: label_categories }

with TestDir() as test_dir:
self._test_save_and_load(SrcTestExtractor(),
self._test_save_and_load(SrcExtractor(),
CocoInstancesConverter(segmentation_mode='polygons'), test_dir,
target_dataset=DstTestExtractor())
target_dataset=DstExtractor())

def test_can_save_and_load_images(self):
class TestExtractor(Extractor):
@@ -506,16 +423,8 @@ def __iter__(self):
annotations=[
Label(4, id=1, group=1),
Label(9, id=2, group=2),
]),
DatasetItem(id=2, subset='train',
annotations=[
Label(4, id=4, group=4),
]),

DatasetItem(id=3, subset='val',
annotations=[
Label(2, id=1, group=1),
]),
]
),
])

def categories(self):
@@ -555,21 +464,16 @@ def __iter__(self):
# Full instance annotations: bbox + keypoints
Points([1, 2, 3, 4, 2, 3], group=2, id=2),
Bbox(1, 2, 2, 2, group=2, id=2),
]),
DatasetItem(id=2, subset='train', image=np.zeros((5, 4, 3)),
annotations=[

# Solitary keypoints
Points([1, 2, 0, 2, 4, 1], label=5, id=3),

# Some other solitary annotations (bug #1387)
Polygon([0, 0, 4, 0, 4, 4], label=3, id=4),
]),

DatasetItem(id=3, subset='val',
annotations=[
# Solitary keypoints with no label
Points([0, 0, 1, 2, 3, 4], [0, 1, 2], id=3),
]),
Points([0, 0, 1, 2, 3, 4], [0, 1, 2], id=5),
])
])

def categories(self):
@@ -593,24 +497,19 @@ def __iter__(self):
Polygon([1, 2, 3, 2, 3, 4, 1, 4],
group=2, id=2,
attributes={'is_crowd': False}),
]),
DatasetItem(id=2, subset='train',
annotations=[

Points([1, 2, 0, 2, 4, 1],
label=5, group=3, id=3,
attributes={'is_crowd': False}),
Polygon([0, 1, 4, 1, 4, 2, 0, 2],
label=5, group=3, id=3,
attributes={'is_crowd': False}),
]),

DatasetItem(id=3, subset='val',
annotations=[
Points([0, 0, 1, 2, 3, 4], [0, 1, 2],
group=3, id=3,
group=5, id=5,
attributes={'is_crowd': False}),
Polygon([1, 2, 3, 2, 3, 4, 1, 4],
group=3, id=3,
group=5, id=5,
attributes={'is_crowd': False}),
]),
])
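
For reference, the change above replaces the runtime generation of COCO JSON in the importer tests with a dummy dataset committed under the test assets (DUMMY_DATASET_DIR). Below is a minimal sketch of that pattern; the import paths are assumed to match the Datumaro version embedded in CVAT at the time, and the expected item and the 'TEST' label only mirror the expected extractor visible in the diff, not the full asset contents.

# A minimal sketch of the asset-based import test pattern used in this PR.
# Assumes a dummy COCO dataset committed under tests/assets/coco_dataset and
# import paths from the Datumaro version embedded in CVAT at the time.
import os.path as osp
from unittest import TestCase

import numpy as np

from datumaro.components.extractor import (AnnotationType, DatasetItem,
    Extractor, LabelCategories)
from datumaro.components.project import Project
from datumaro.util.test_utils import compare_datasets

DUMMY_DATASET_DIR = osp.join(osp.dirname(__file__), 'assets', 'coco_dataset')

class CocoImportSketch(TestCase):
    def test_can_import(self):
        # The expected contents are spelled out as a static Extractor...
        class DstExtractor(Extractor):
            def __iter__(self):
                # Illustrative item; the real test also lists annotations.
                return iter([
                    DatasetItem(id=1, image=np.ones((10, 5, 3))),
                ])

            def categories(self):
                label_cat = LabelCategories()
                label_cat.add('TEST')
                return { AnnotationType.label: label_cat }

        # ...while the actual data is read from the static asset directory
        # instead of being generated and saved on the fly.
        dataset = Project.import_from(DUMMY_DATASET_DIR, 'coco') \
            .make_dataset()

        compare_datasets(self, DstExtractor(), dataset)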