Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
from dataclasses import dataclass
from typing import Optional

from deepness.common.processing_parameters.map_processing_parameters import MapProcessingParameters


@dataclass
class ClassifyChipParameters(MapProcessingParameters):
    """
    Parameters for Classifying Chips obtained from UI.

    Extends MapProcessingParameters with the QGIS layer ids and the model
    configuration needed to classify image chips cut out by bounding boxes.
    """

    raster_id: Optional[str]  # QGIS layer id of the input raster (map) layer
    vector_id: Optional[str]  # QGIS layer id of the vector layer holding the bounding boxes
    config: dict  # model configuration as built by the dock widget
    # config keys: 'model_name', 'weights_ckpt', 'class_names', 'normalization_mean',
    # 'normalization_std', 'model_arch', 'n_channels', 'image_size'
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@
from dataclasses import dataclass

from deepness.common.processing_parameters.map_processing_parameters import MapProcessingParameters
from deepness.processing.models.model_base import ModelBase


@dataclass
class DetectorTypeParameters:
Expand All @@ -24,6 +22,7 @@ class DetectorType(enum.Enum):
YOLO_ULTRALYTICS = 'YOLO_Ultralytics'
YOLO_ULTRALYTICS_SEGMENTATION = 'YOLO_Ultralytics_segmentation'
YOLO_ULTRALYTICS_OBB = 'YOLO_Ultralytics_obb'
RT_DETR = 'RT_DETR'

def get_parameters(self):
if self == DetectorType.YOLO_v5_v7_DEFAULT:
Expand All @@ -42,6 +41,10 @@ def get_parameters(self):
has_inverted_output_shape=True,
skipped_objectness_probability=True,
)
elif self == DetectorType.RT_DETR:
return DetectorTypeParameters(
skipped_objectness_probability=True,
)
else:
raise ValueError(f'Unknown detector type: {self}')

Expand All @@ -61,7 +64,7 @@ class DetectionParameters(MapProcessingParameters):
"""
Parameters for Inference of detection model (including pre/post-processing) obtained from UI.
"""

from deepness.processing.models.model_base import ModelBase
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why import here instead of at the top of the file?

model: ModelBase # wrapper of the loaded model

confidence: float
Expand Down
62 changes: 60 additions & 2 deletions src/deepness/deepness.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,23 +8,27 @@
import logging
import traceback

from qgis.core import Qgis, QgsApplication, QgsProject, QgsVectorLayer
from qgis.core import Qgis, QgsApplication, QgsProject, QgsVectorLayer, QgsField, QgsWkbTypes
from qgis.gui import QgisInterface
from qgis.PyQt.QtCore import QCoreApplication, Qt
from qgis.PyQt.QtCore import QCoreApplication, Qt, QVariant
from qgis.PyQt.QtGui import QIcon
from qgis.PyQt.QtWidgets import QAction, QMessageBox

from deepness.common.defines import IS_DEBUG, PLUGIN_NAME
from deepness.common.lazy_package_loader import LazyPackageLoader
from deepness.common.processing_parameters.map_processing_parameters import MapProcessingParameters, ProcessedAreaType
from deepness.common.processing_parameters.training_data_export_parameters import TrainingDataExportParameters
from deepness.common.processing_parameters.classify_chip_parameters import ClassifyChipParameters

from deepness.deepness_dockwidget import DeepnessDockWidget
from deepness.dialogs.resizable_message_box import ResizableMessageBox
from deepness.images.get_image_path import get_icon_path
from deepness.processing.map_processor.map_processing_result import (MapProcessingResult, MapProcessingResultCanceled,
MapProcessingResultFailed,
MapProcessingResultSuccess)
from deepness.processing.map_processor.map_processor_training_data_export import MapProcessorTrainingDataExport
from deepness.processing.classify_chip import ClassifyChipTask


cv2 = LazyPackageLoader('cv2')

Expand Down Expand Up @@ -167,6 +171,9 @@ def onClosePlugin(self):
# when closing the docked window:
# self.dockwidget = None

# QgsProject.instance().layersAdded.disconnect(self.dockwidget._refresh_bbox_combo)
# QgsProject.instance().layersRemoved.disconnect(self.dockwidget._refresh_bbox_combo)

self.pluginIsActive = False

def unload(self):
Expand Down Expand Up @@ -203,6 +210,7 @@ def run(self):
self.dockwidget.closingPlugin.connect(self.onClosePlugin)
self.dockwidget.run_model_inference_signal.connect(self._run_model_inference)
self.dockwidget.run_training_data_export_signal.connect(self._run_training_data_export)
self.dockwidget.run_classify_chip_signal.connect(self._classify_chip)

self.iface.addDockWidget(Qt.RightDockWidgetArea, self.dockwidget)
self.dockwidget.show()
Expand Down Expand Up @@ -250,6 +258,56 @@ def _run_training_data_export(self, training_data_export_parameters: TrainingDat
QgsApplication.taskManager().addTask(self._map_processor)
self._display_processing_started_info()

def _classify_chip(self, classify_chip_parameters: ClassifyChipParameters):
    """Run chip classification as a background QGIS task.

    Starts a ClassifyChipTask for the raster/vector layer pair selected in the
    UI; when the task completes successfully, its per-feature results are
    published as a new temporary memory layer added to the project.

    :param classify_chip_parameters: layer ids and model config from the UI
    """
    raster_id = classify_chip_parameters.raster_id
    vector_id = classify_chip_parameters.vector_id
    cfg = classify_chip_parameters.config
    task = ClassifyChipTask(raster_id, vector_id, cfg)

    # 'task' is bound as a default argument so the callback keeps it alive
    # after this method returns.
    def on_done(ok, task=task):
        # Build the output layer from task.results; ok=False on termination.
        if not ok or not task.results:
            print("Classification task failed or empty results.")
            return

        # temporary memory layer with the same geometry type and CRS as the source
        src = task.vec  # already reprojected bboxes
        geom_name = QgsWkbTypes.displayString(src.wkbType())
        dup = QgsVectorLayer(f"{geom_name}?crs={src.crs().authid()}", task.OUT_LAYER_NAME, "memory")

        # copy original attributes and features
        prov = dup.dataProvider()
        prov.addAttributes(src.fields())
        dup.updateFields()
        prov.addFeatures(list(src.getFeatures()))

        # add custom attribute fields (must happen inside an edit session,
        # followed by updateFields() before the new fields are usable)
        dup.startEditing()
        dup.addAttribute(QgsField(task.ATTR_LABEL_FIELD, QVariant.String))  # predicted label
        for cname in task.CLASS_NAMES:  # one Double field per class probability
            dup.addAttribute(QgsField(f"{task.ATTR_PROB_PREFIX}{cname}", QVariant.Double))
        dup.updateFields()

        # fill attribute fields by feature id (fid)
        # NOTE(review): task.results is keyed by fid; this assumes the memory
        # provider assigns the duplicated features the same fids as the source
        # layer's features — verify, otherwise results will silently not match.
        for f in dup.getFeatures():
            fid = f.id()
            if fid in task.results:
                label, probs = task.results[fid]
                f[task.ATTR_LABEL_FIELD] = label
                for j, cname in enumerate(task.CLASS_NAMES):
                    f[f"{task.ATTR_PROB_PREFIX}{cname}"] = float(probs[j])
                dup.updateFeature(f)
        dup.commitChanges()

        # add temporary layer to QGIS
        QgsProject.instance().addMapLayer(dup)
        print(f"Added prediction layer: {task.OUT_LAYER_NAME} with fields: "
              f"{task.ATTR_LABEL_FIELD}, {[task.ATTR_PROB_PREFIX + c for c in task.CLASS_NAMES]}")

    task.taskCompleted.connect(lambda: on_done(True))
    task.taskTerminated.connect(lambda: on_done(False))
    QgsApplication.taskManager().addTask(task)
    self._display_processing_started_info()

def _run_model_inference(self, params: MapProcessingParameters):
from deepness.processing.models.model_types import ModelDefinition # import here to avoid pulling external dependencies to early

Expand Down
109 changes: 109 additions & 0 deletions src/deepness/deepness_dockwidget.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,12 @@
import os
from typing import Optional

from qgis.gui import QgsCollapsibleGroupBox
from qgis.core import Qgis, QgsMapLayerProxyModel, QgsProject
from qgis.PyQt import QtWidgets, uic
from qgis.PyQt.QtCore import pyqtSignal
from qgis.PyQt.QtWidgets import QComboBox, QFileDialog, QMessageBox
from qgis.PyQt.QtWidgets import QFormLayout, QWidget, QLineEdit, QPushButton, QSpinBox

from deepness.common.config_entry_key import ConfigEntryKey
from deepness.common.defines import IS_DEBUG, PLUGIN_NAME
Expand All @@ -23,6 +25,7 @@
from deepness.common.processing_parameters.segmentation_parameters import SegmentationParameters
from deepness.common.processing_parameters.superresolution_parameters import SuperresolutionParameters
from deepness.common.processing_parameters.training_data_export_parameters import TrainingDataExportParameters
from deepness.common.processing_parameters.classify_chip_parameters import ClassifyChipParameters
from deepness.processing.models.model_base import ModelBase
from deepness.widgets.input_channels_mapping.input_channels_mapping_widget import InputChannelsMappingWidget
from deepness.widgets.training_data_export_widget.training_data_export_widget import TrainingDataExportWidget
Expand All @@ -43,6 +46,7 @@ class DeepnessDockWidget(QtWidgets.QDockWidget, FORM_CLASS):
closingPlugin = pyqtSignal()
run_model_inference_signal = pyqtSignal(MapProcessingParameters) # run Segmentation or Detection
run_training_data_export_signal = pyqtSignal(TrainingDataExportParameters)
run_classify_chip_signal = pyqtSignal(ClassifyChipParameters)

def __init__(self, iface, parent=None):
super(DeepnessDockWidget, self).__init__(parent)
Expand Down Expand Up @@ -142,6 +146,57 @@ def _save_ui_to_config(self):

def _rlayer_updated(self):
self._input_channels_mapping_widget.set_rlayer(self._get_input_layer())

####################### Helper functions for classification begins #######################
def _mk_layer_combo(self, filter_model):
    """Create a map-layer combo box restricted by *filter_model*."""
    return self._new_layer_combo(filter_model)

def _new_layer_combo(self, filter_model):
    """Instantiate another layer combo of the same widget class as the
    existing area-mask combo, with the given layer filter applied."""
    combo_cls = type(self.mMapLayerComboBox_areaMaskLayer)  # reuse the UI's combo class
    combo = combo_cls(self)
    combo.setFilters(filter_model)  # e.g. restrict to vector (bounding box) layers
    return combo

# def _refresh_bbox_combo(self, *args):
# # QgsMapLayerComboBox refreshes automatically; nothing to do here
# pass

def _pick_ckpt(self, lineedit):
    """Open a file dialog and put the chosen checkpoint path into *lineedit*.

    Leaves the line edit untouched when the dialog is cancelled."""
    chosen_path, _selected_filter = QFileDialog.getOpenFileName(
        self, "Pick weights checkpoint", "", "PyTorch (*.pt *.pth);;All (*)")
    if not chosen_path:
        return  # dialog cancelled
    lineedit.setText(chosen_path)

def _parse_strings(self, s):
return [x.strip() for x in s.split(",") if x.strip()]

def _parse_floats(self, s):
return [float(x.strip()) for x in s.split(",") if x.strip()]

def _emit_classify_chip(self):
    """Collect UI values into ClassifyChipParameters and emit the run signal.

    Validates the selected layers and the numeric text fields first, showing a
    warning dialog instead of letting an exception escape the Qt slot.
    """
    raster = self.mMapLayerComboBox_inputLayer.currentLayer()  # pre-selected input data
    bbox = self.cmb_bbox.currentLayer()  # selected bounding box (vector) layer
    if raster is None or bbox is None:
        QMessageBox.warning(self, "Deepness", "Pick raster and vector (bounding box) layer.")
        return

    try:
        # user-typed text; float() raises ValueError on bad tokens, which would
        # otherwise crash the slot uncaught
        normalization_mean = self._parse_floats(self.le_mean.text())
        normalization_std = self._parse_floats(self.le_std.text())
    except ValueError:
        QMessageBox.warning(self, "Deepness",
                            "Normalization mean/std must be comma-separated numbers.")
        return

    cfg = {
        # NOTE(review): the "Model Name" field (self.le_mname) is ignored here
        # and model_name stays None — confirm whether this is intentional.
        'model_name': None,
        'weights_ckpt': self.le_ckpt.text(),
        'class_names': self._parse_strings(self.le_classes.text()),
        'normalization_mean': normalization_mean,
        'normalization_std': normalization_std,
        'model_arch': self.le_arch.text(),
        'n_channels': int(self.sb_ch.value()),
        'image_size': int(self.sb_size.value()),
    }
    map_processing_parameters = self._get_map_processing_parameters()
    params = ClassifyChipParameters(
        **map_processing_parameters.__dict__,
        raster_id=raster.id(),
        vector_id=bbox.id(),
        config=cfg,
    )
    self.run_classify_chip_signal.emit(params)
####################### Helper functions for classification ends #######################

def _setup_misc_ui(self):
""" Setup some misceleounous ui forms
Expand Down Expand Up @@ -171,6 +226,60 @@ def _setup_misc_ui(self):
self.comboBox_detectorType.addItem(detector_type)
self._detector_type_changed()

####################### GUI for classification begins #######################
# set up panel
self.group_chip = QgsCollapsibleGroupBox("Chip classification on input data selected above", self)
self.group_chip.setCollapsed(False)
form = QFormLayout(self.group_chip)

# select vector (bounding box) layer
self.cmb_bbox = self._mk_layer_combo(filter_model=QgsMapLayerProxyModel.VectorLayer)
form.addRow("Bounding boxes:", self.cmb_bbox)

# select path to model checkpoint
self.le_ckpt = QLineEdit('models/d4-cls-exp1_6ch-3cat-48px-newval_best.pt')
btn_ckpt = QPushButton("Browse…")
btn_ckpt.clicked.connect(lambda: self._pick_ckpt(self.le_ckpt))
row_ckpt = QtWidgets.QHBoxLayout() # arrange widgets left to right
row_ckpt.addWidget(self.le_ckpt)
row_ckpt.addWidget(btn_ckpt)
wrap_ckpt = QWidget() # wrap the row of widgets into one
wrap_ckpt.setLayout(row_ckpt)
form.addRow("Model file path (.pt):", wrap_ckpt)

# select model configuration (with default values)
self.le_mname = QLineEdit('Tree Classifier')
self.le_classes = QLineEdit('deadtree,topdownthreat,tree')
self.le_mean = QLineEdit('0.358034,0.492683,0.479417,0.502045,0.499618,0.536634')
self.le_std = QLineEdit('0.213879,0.261817,0.220773,0.279437,0.281337,0.199962')
self.le_arch = QLineEdit('tf_efficientnet_b4.ns_jft_in1k')
self.sb_ch = QSpinBox(); self.sb_ch.setRange(1, 12); self.sb_ch.setValue(6)
self.sb_size = QSpinBox(); self.sb_size.setRange(16, 1024); self.sb_size.setValue(48)

form.addRow("Model Name:", self.le_mname)
form.addRow("Class Names:", self.le_classes)
form.addRow("Normalization Mean:", self.le_mean)
form.addRow("Normalization Std:", self.le_std)
form.addRow("Model architecture:", self.le_arch)
form.addRow("Number of Channels:", self.sb_ch)
form.addRow("Image Size:", self.sb_size)

# button to run classification
btn_run_chip = QPushButton("Classify Chips")
btn_run_chip.clicked.connect(self._emit_classify_chip)
form.addRow("", btn_run_chip)

# insert the entire block just below "Training data export" block
export_block = self.verticalLayout_trainingDataExport.parentWidget()
layout = self.verticalLayout_3
layout.insertWidget(layout.indexOf(export_block)+1, self.group_chip)

# # keep comboboxes synced with project layers
# QgsProject.instance().layersAdded.connect(self._refresh_bbox_combo)
# QgsProject.instance().layersRemoved.connect(self._refresh_bbox_combo)
# self._refresh_bbox_combo()
####################### GUI for classification ends #######################

self._rlayer_updated() # to force refresh the dependant ui elements

def _set_processed_area_mask_options(self):
Expand Down
2 changes: 1 addition & 1 deletion src/deepness/metadata.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
name=Deepness: Deep Neural Remote Sensing
qgisMinimumVersion=3.22
description=Inference of deep neural network models (ONNX) for segmentation, detection and regression
version=0.7.0
version=0.8.0
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please increase the version number to 0.8.0

Copy link

Copilot AI Aug 14, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Version appears to be downgraded from 0.7.0 to 0.6.5. This could indicate an error in version management or merge conflict.

Suggested change
version=0.6.5
version=0.7.0

Copilot uses AI. Check for mistakes.
author=PUT Vision
email=przemyslaw.aszkowski@gmail.com

Expand Down
Loading
Loading