Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
113 changes: 83 additions & 30 deletions tools/visual-pipeline-and-platform-evaluation-tool/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -518,6 +518,50 @@ def update_inferencing_channels_label():
return gr.update(minimum=1, value=8, label=INFERENCING_CHANNELS_LABEL)


def select_preferred_devices(device_list):
    """
    Returns:
        preferred_device: str - preferred device for inference (GPU > NPU > CPU)
        non_npu_preferred_device: str - preferred device for inference, but never NPU (GPU > CPU)
    """
    # Preference order: discrete GPU (lowest gpu_id), then any GPU (lowest
    # gpu_id), then NPU, then CPU.  The non-NPU result skips the NPU step
    # and falls straight through to CPU.
    gpus = [d for d in device_list if d.device_family == DeviceFamily.GPU]
    if gpus:
        # Sort key: discrete GPUs first (False sorts before True), ties
        # broken by the smallest gpu_id.
        best_gpu = min(
            gpus,
            key=lambda d: (d.device_type != DeviceType.DISCRETE, d.gpu_id),
        )
        return best_gpu.device_name, best_gpu.device_name

    npus = [d for d in device_list if d.device_family == DeviceFamily.NPU]
    if npus:
        # An NPU is acceptable for inference, but the non-NPU slot must
        # fall back to CPU.
        return npus[0].device_name, "CPU"

    # Nothing usable discovered; CPU for both.
    return "CPU", "CPU"


# Create the interface
def create_interface(title: str = "Visual Pipeline and Platform Evaluation Tool"):
"""
Expand Down Expand Up @@ -636,39 +680,18 @@ def create_interface(title: str = "Visual Pipeline and Platform Evaluation Tool"
# Inference accordion
inference_accordion = gr.Accordion("Inference Parameters", open=True)

# Select preferred device for inference
# 1. If any discrete GPU, pick the one with the smallest gpu_id
# 2. If any GPU, pick the one with the smallest gpu_id
# 3. Else pick NPU
# 4. Else pick CPU
device_list = device_discovery.list_devices()
# Find discrete GPUs
discrete_gpus = [
d
for d in device_list
if d.device_family == DeviceFamily.GPU and d.device_type == DeviceType.DISCRETE
]
if discrete_gpus:
# Pick discrete GPU with smallest gpu_id
preferred_device = min(discrete_gpus, key=lambda d: d.gpu_id).device_name
else:
# Find any GPU
gpus = [d for d in device_list if d.device_family == DeviceFamily.GPU]
if gpus:
# Pick GPU with smallest gpu_id
preferred_device = min(gpus, key=lambda d: d.gpu_id).device_name
else:
# Find NPU
npus = [d for d in device_list if d.device_family == DeviceFamily.NPU]
if npus:
# Pick first NPU
preferred_device = npus[0].device_name
else:
# Default to CPU
preferred_device = "CPU"

# Get available devices for inference
# Get available devices
devices = [(device.full_device_name, device.device_name) for device in device_list]
non_npu_devices = [
(device.full_device_name, device.device_name)
for device in device_list
if device.device_family != DeviceFamily.NPU
]

# Select preferred devices
preferred_device, non_npu_preferred_device = select_preferred_devices(device_list)

# Object detection model
# Mapping of these choices to actual model path in utils.py
Expand Down Expand Up @@ -796,6 +819,22 @@ def create_interface(title: str = "Visual Pipeline and Platform Evaluation Tool"
elem_id="object_classification_reclassify_interval",
)

# Compositor device
compositor_device = gr.Dropdown(
label="Compositor Device",
choices=non_npu_devices,
value=non_npu_preferred_device,
elem_id="compositor_device",
)

# Decoder device
decoding_device = gr.Dropdown(
label="Decoding Device",
choices=devices,
value=preferred_device,
elem_id="decoding_device",
)

pipeline_watermark_enabled = gr.Checkbox(
label="Overlay inference results on inference channels",
value=True,
Expand Down Expand Up @@ -864,6 +903,8 @@ def create_interface(title: str = "Visual Pipeline and Platform Evaluation Tool"
components.add(object_classification_inference_interval)
components.add(object_classification_nireq)
components.add(object_classification_reclassify_interval)
components.add(compositor_device)
components.add(decoding_device)
components.add(pipeline_watermark_enabled)
components.add(pipeline_video_enabled)
components.add(live_preview_enabled)
Expand Down Expand Up @@ -1302,6 +1343,18 @@ def _():
object_classification_nireq.render()
object_classification_reclassify_interval.render()

# Compositor Device
@gr.render(triggers=[run_tab.select])
def _():
show_hide_component(
compositor_device,
current_pipeline[1]["parameters"]["inference"][
"compositor_device"
],
)

decoding_device.render()

# Footer
gr.HTML(
"<div class='spark-footer'>"
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
from typing import List, Tuple, Dict, Optional

# Keys for device selection
GPU_0 = "GPU_0"
GPU_N = "GPU_N"
OTHER = "OTHER"
# Placeholder for vaapi_suffix to be replaced at runtime
VAAPI_SUFFIX_PLACEHOLDER = "{vaapi_suffix}"


class PipelineElementSelectionInstructions:
    """Per-element lookup rules for pipeline element selection.

    Each attribute maps a device-class key (GPU_0, GPU_N, or OTHER) to an
    ordered list of (search, result) pairs consumed by
    PipelineElementsSelector when resolving concrete pipeline elements.
    """

    def __init__(
        self,
        compositor: Optional[Dict[str, List[Tuple[str, str]]]] = None,
        encoder: Optional[Dict[str, List[Tuple[str, str]]]] = None,
        decoder: Optional[Dict[str, List[Tuple[str, str]]]] = None,
        postprocessing: Optional[Dict[str, List[Tuple[str, str]]]] = None,
    ):
        # Normalize omitted rule sets to empty dicts so downstream code
        # can iterate without None checks.
        self.compositor, self.encoder, self.decoder, self.postprocessing = (
            compositor or {},
            encoder or {},
            decoder or {},
            postprocessing or {},
        )


class PipelineElementsSelector:
def __init__(
self,
selection_instructions: PipelineElementSelectionInstructions,
):
self.instructions = selection_instructions

def select_elements(
self,
parameters: dict,
elements: list,
) -> Tuple[Optional[str], Optional[str], Optional[str], Optional[str]]:
compositor_gpu_id, compositor_vaapi_suffix = self._select_gpu(
parameters.get("compositor_device", "")
)
detection_gpu_id, detection_vaapi_suffix = self._select_gpu(
parameters.get("object_detection_device", "")
)
decoding_gpu_id, decoding_vaapi_suffix = self._select_gpu(
parameters.get("decoding_device", "")
)

compositor_element = self._select_element(
self.instructions.compositor,
elements,
compositor_gpu_id,
compositor_vaapi_suffix,
)
encoder_element = self._select_element(
self.instructions.encoder,
elements,
detection_gpu_id,
detection_vaapi_suffix,
)
decoder_element = self._select_element(
self.instructions.decoder,
elements,
decoding_gpu_id,
decoding_vaapi_suffix,
)
postprocessing_element = self._select_element(
self.instructions.postprocessing,
elements,
detection_gpu_id,
detection_vaapi_suffix,
)

return (
compositor_element,
encoder_element,
decoder_element,
postprocessing_element,
)

@staticmethod
def _select_gpu(device: str) -> Tuple[int, Optional[str]]:
gpu_id = -1
vaapi_suffix = None

# Determine gpu_id and vaapi_suffix
# If there is only one GPU, device name is just GPU
# If there is more than one GPU, device names are like GPU.0, GPU.1, ...
if device == "GPU":
gpu_id = 0
elif device.startswith("GPU."):
try:
gpu_index = int(device.split(".")[1])
if gpu_index == 0:
gpu_id = 0
elif gpu_index > 0:
vaapi_suffix = str(128 + gpu_index)
gpu_id = gpu_index
except (IndexError, ValueError):
gpu_id = -1
else:
gpu_id = -1

return gpu_id, vaapi_suffix

@staticmethod
def _select_element(
field_dict: Dict[str, List[Tuple[str, str]]],
elements: list,
gpu_id: int,
vaapi_suffix: Optional[str],
) -> Optional[str]:
key = OTHER
if gpu_id == 0:
key = GPU_0
elif gpu_id > 0:
key = GPU_N

pairs = field_dict.get(key, [])
# Add OTHER pairs as fallback if key is not OTHER
if key != OTHER:
pairs = pairs + field_dict.get(OTHER, [])

if not pairs:
return None

for search, result in pairs:
if search == "": # to support optional parameters
return result

if VAAPI_SUFFIX_PLACEHOLDER in search or VAAPI_SUFFIX_PLACEHOLDER in result:
suffix = vaapi_suffix if vaapi_suffix is not None else ""
search = search.replace(VAAPI_SUFFIX_PLACEHOLDER, suffix)
result = result.replace(VAAPI_SUFFIX_PLACEHOLDER, suffix)
for element in elements:
if element[1] == search:
return result
return None
Original file line number Diff line number Diff line change
Expand Up @@ -34,3 +34,4 @@ parameters:
- "PaddleOCR (FP32)"
- "Vehicle Attributes Recognition Barrier 0039 (FP16)"
classification_model_default: "PaddleOCR (FP32)"
compositor_device: false
Loading