Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions wbia/control/manual_image_funcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -381,6 +381,7 @@ def add_images(
ensure_unique=False,
ensure_loadable=True,
ensure_exif=True,
image_uuid_list=None,
**kwargs,
):
r"""
Expand Down Expand Up @@ -460,6 +461,25 @@ def add_images(

logger.info('Using cache_uri_dict = {}'.format(ut.repr3(cache_uri_dict)))

# Override hash-computed UUIDs with caller-provided ones.
# A None element means "use the hash-computed UUID for this image".
# NOTE(review): assumes params_ is a tuple whose first slot is the image
# UUID (and is None when the image failed to load) -- confirm against the
# image-parameter parsing helper that builds params_list.
if image_uuid_list is not None:
    # The override list must be parallel to params_list (one entry per image).
    if len(image_uuid_list) != len(params_list):
        raise ValueError(
            'image_uuid_list length %d != params_list length %d'
            % (len(image_uuid_list), len(params_list))
        )
    new_params_list = []
    for custom_uuid, (gpath_, params_) in zip(image_uuid_list, params_list):
        if params_ is None and custom_uuid is not None:
            # The image could not be loaded, so there is nothing to attach
            # the custom UUID to; warn rather than failing the whole batch.
            logger.warning(
                'Custom UUID %s dropped: image failed to load' % (custom_uuid,)
            )
        elif params_ is not None and custom_uuid is not None:
            # Replace the hash-computed UUID (first tuple slot) with the
            # caller-supplied one; all other computed params are kept.
            params_ = (custom_uuid,) + params_[1:]
        new_params_list.append((gpath_, params_))
    params_list = new_params_list

# <DEBUG>
debug = False
if debug:
Expand Down
138 changes: 126 additions & 12 deletions wbia/core_annots.py
Original file line number Diff line number Diff line change
Expand Up @@ -353,11 +353,35 @@ def compute_chip(depc, aid_list, config=None):
bbox_list = ibs.get_annot_bboxes(aid_list)
theta_list = ibs.get_annot_thetas(aid_list)

result_list = gen_chip_configure_and_compute(
ibs, gid_list, aid_list, bbox_list, theta_list, config
)
for result in result_list:
yield result
# Filter out annotations with zero-area bounding boxes. We must still
# yield one result per input aid so the depcache stays aligned, so yield
# None for invalid annotations (the depcache's filter_Nones handles them).
# NOTE(review): assumes bboxes are (x, y, w, h) tuples so columns 2/3 are
# width/height -- confirm against get_annot_bboxes.
bbox_sizes = ut.take_column(bbox_list, [2, 3])
valid_flags = [w != 0 and h != 0 for (w, h) in bbox_sizes]
invalid_aids = ut.compress(aid_list, [not f for f in valid_flags])
if len(invalid_aids) > 0:
    logger.warning(
        'Skipping %d annotations with zero-area bounding boxes: %r'
        % (len(invalid_aids), invalid_aids)
    )

# Parallel sublists restricted to the annotations we can actually chip.
valid_gids = ut.compress(gid_list, valid_flags)
valid_aids = ut.compress(aid_list, valid_flags)
valid_bboxes = ut.compress(bbox_list, valid_flags)
valid_thetas = ut.compress(theta_list, valid_flags)

# Build a lookup from aid -> result for valid annotations
# NOTE(review): assumes aids are unique within aid_list and that the
# generator yields exactly one result per input, in order -- confirm.
valid_results = {}
if len(valid_aids) > 0:
    result_list = gen_chip_configure_and_compute(
        ibs, valid_gids, valid_aids, valid_bboxes, valid_thetas, config
    )
    for aid, result in zip(valid_aids, result_list):
        valid_results[aid] = result

# Yield in original order: real result for valid aids, None for invalid
for aid in aid_list:
    yield valid_results.get(aid, None)
logger.info('Done Preprocessing Chips')


Expand Down Expand Up @@ -1836,7 +1860,37 @@ def compute_classifications(depc, aid_list, config=None):
chip_filepath_list = depc.get_property(
'chips', aid_list, 'img', config=config2, read_extern=False, ensure=True
)
result_list = densenet.test(chip_filepath_list, **config) # yield detections

# Filter out annotations with missing chip paths; yield None for
# skipped rows so the depcache skips them without caching a fake result.
valid_indices = set()
skipped_aids = []
for i, p in enumerate(chip_filepath_list):
    if p is not None:
        valid_indices.add(i)
    else:
        skipped_aids.append(aid_list[i])
if len(valid_indices) < len(chip_filepath_list):
    logger.warning(
        'Skipping %d/%d annotations with missing chip paths: aids=%r'
        % (len(skipped_aids), len(chip_filepath_list), skipped_aids)
    )
    # sorted() keeps the surviving chips in their original input order.
    valid_chip_list = [chip_filepath_list[i] for i in sorted(valid_indices)]
else:
    # Nothing was skipped; reuse the original list without copying.
    valid_chip_list = chip_filepath_list

# NOTE(review): assumes densenet.test yields exactly one result per input
# chip, in order -- confirm against the detector API.
if valid_chip_list:
    valid_results = list(densenet.test(valid_chip_list, **config))
else:
    valid_results = []

# Stitch results back into one entry per original annotation, inserting
# None placeholders for the skipped (pathless) rows.
valid_iter = iter(valid_results)
result_list = []
for i in range(len(chip_filepath_list)):
    if i in valid_indices:
        result_list.append(next(valid_iter))
    else:
        result_list.append(None)
else:
raise ValueError(
'specified classifier algo is not supported in config = {!r}'.format(config)
Expand Down Expand Up @@ -2019,11 +2073,41 @@ def compute_labels_annotations(depc, aid_list, config=None):
chip_filepath_list = depc.get_property(
'chips', aid_list, 'img', config=config_, read_extern=False, ensure=True
)

# Filter out annotations with missing chip paths
# (a None path means no chip could be computed for that annotation).
valid_indices = set()
skipped_aids = []
for i, p in enumerate(chip_filepath_list):
    if p is not None:
        valid_indices.add(i)
    else:
        skipped_aids.append(aid_list[i])
if len(valid_indices) < len(chip_filepath_list):
    logger.warning(
        'Skipping %d/%d annotations with missing chip paths: aids=%r'
        % (len(skipped_aids), len(chip_filepath_list), skipped_aids)
    )
    # sorted() keeps the surviving chips in their original input order.
    valid_chip_list = [chip_filepath_list[i] for i in sorted(valid_indices)]
else:
    # Nothing was skipped; reuse the original list without copying.
    valid_chip_list = chip_filepath_list

# Copy the config so the labeler weights can be aliased into the key the
# classifier backend expects without mutating the caller's config dict.
config = dict(config)
config['classifier_weight_filepath'] = config['labeler_weight_filepath']
result_gen = efficientnet.test_dict(
chip_filepath_list, return_dict=True, **config
)

# NOTE(review): assumes efficientnet.test_dict yields exactly one result
# per input chip, in order -- confirm against the labeler API.
if valid_chip_list:
    valid_results = list(efficientnet.test_dict(
        valid_chip_list, return_dict=True, **config
    ))
else:
    valid_results = []

# Rebuild one entry per original annotation: real results for valid rows,
# None placeholders for rows whose chip path was missing.
valid_iter = iter(valid_results)
result_gen = []
for i in range(len(chip_filepath_list)):
    if i in valid_indices:
        result_gen.append(next(valid_iter))
    else:
        result_gen.append(None)

elif config['labeler_algo'] in ['densenet']:
from wbia.algo.detect import densenet
Expand All @@ -2037,11 +2121,41 @@ def compute_labels_annotations(depc, aid_list, config=None):
chip_filepath_list = depc.get_property(
'chips', aid_list, 'img', config=config_, read_extern=False, ensure=True
)

# Filter out annotations with missing chip paths
# (a None path means no chip could be computed for that annotation).
valid_indices = set()
skipped_aids = []
for i, p in enumerate(chip_filepath_list):
    if p is not None:
        valid_indices.add(i)
    else:
        skipped_aids.append(aid_list[i])
if len(valid_indices) < len(chip_filepath_list):
    logger.warning(
        'Skipping %d/%d annotations with missing chip paths: aids=%r'
        % (len(skipped_aids), len(chip_filepath_list), skipped_aids)
    )
    # sorted() keeps the surviving chips in their original input order.
    valid_chip_list = [chip_filepath_list[i] for i in sorted(valid_indices)]
else:
    # Nothing was skipped; reuse the original list without copying.
    valid_chip_list = chip_filepath_list

# Copy the config so the labeler weights can be aliased into the key the
# classifier backend expects without mutating the caller's config dict.
config = dict(config)
config['classifier_weight_filepath'] = config['labeler_weight_filepath']
result_gen = densenet.test_dict(
chip_filepath_list, return_dict=True, **config
)

# NOTE(review): assumes densenet.test_dict yields exactly one result per
# input chip, in order -- confirm against the labeler API.
if valid_chip_list:
    valid_results = list(densenet.test_dict(
        valid_chip_list, return_dict=True, **config
    ))
else:
    valid_results = []

# Rebuild one entry per original annotation: real results for valid rows,
# None placeholders for rows whose chip path was missing.
valid_iter = iter(valid_results)
result_gen = []
for i in range(len(chip_filepath_list)):
    if i in valid_indices:
        result_gen.append(next(valid_iter))
    else:
        result_gen.append(None)
else:
labeler_weight_filepath = config['labeler_weight_filepath']
labeler_weight_filepath = labeler_weight_filepath.strip()
Expand Down
20 changes: 19 additions & 1 deletion wbia/web/apis_json.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ def add_imagesets_json(
def add_images_json(
ibs,
image_uri_list,
image_uuid_list=None,
image_unixtime_list=None,
image_gps_lat_list=None,
image_gps_lon_list=None,
Expand Down Expand Up @@ -210,7 +211,6 @@ def _verify(list_, tag, length, allow_none=False):
kwargs['sanitize'] = kwargs.get('sanitize', False)

depricated_list = [
'image_uuid_list',
'image_width_list',
'image_height_list',
'image_orig_name_list',
Expand All @@ -236,6 +236,24 @@ def _verify(list_, tag, length, allow_none=False):
# Rectify values
image_uri_list = _rectify_uri(image_uri_list, None, expected_length, str)
image_uri_list = _verify(image_uri_list, 'image_uri_list', expected_length)

if image_uuid_list is not None:
    # Local import: only needed when the caller supplies custom UUIDs.
    import uuid as uuid_module

    # The override list must be parallel to image_uri_list.
    if len(image_uuid_list) != expected_length:
        raise ValueError(
            'image_uuid_list length %d != image_uri_list length %d'
            % (len(image_uuid_list), expected_length)
        )
    try:
        # Accept both UUID hex strings and pre-constructed uuid.UUID
        # objects; uuid.UUID raises ValueError for malformed strings.
        image_uuid_list = [
            uuid_module.UUID(u) if isinstance(u, str) else u
            for u in image_uuid_list
        ]
    except (ValueError, AttributeError) as ex:
        raise ValueError('Invalid UUID in image_uuid_list: %s' % (ex,))
    # Forwarded to ibs.add_images, which accepts image_uuid_list to
    # override the hash-computed UUIDs per image.
    kwargs['image_uuid_list'] = image_uuid_list

gid_list = ibs.add_images(image_uri_list, **kwargs) # NOQA

if image_unixtime_list is not None:
Expand Down
Loading