Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Ignore all files in the training directory
training

# Optional: Un-ignore specific files or directories within evaluation
# !evaluation/some-important-file.txt
# !evaluation/some-important-directory/
1 change: 1 addition & 0 deletions detection-team/evaluation/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@

Binary file not shown.
Binary file not shown.
113 changes: 113 additions & 0 deletions detection-team/evaluation/bounding_boxes_with_labels.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
image_name,class_label,xmin,ymin,xmax,ymax
image_000000001_jpg.rf.02ab6664294833e5f0e89130ecded0b8,0,343,78,403,336
image_000000001_jpg.rf.02ab6664294833e5f0e89130ecded0b8,0,344,116,415,415
image_000000001_jpg.rf.02ab6664294833e5f0e89130ecded0b8,1,30,86,60,130
image_000000001_jpg.rf.02ab6664294833e5f0e89130ecded0b8,1,62,67,329,415
image_000000002_jpg.rf.8270179e3cd29b97cf502622b381861e,0,198,202,209,254
image_000000002_jpg.rf.8270179e3cd29b97cf502622b381861e,1,10,204,69,258
image_000000002_jpg.rf.8270179e3cd29b97cf502622b381861e,1,240,207,289,278
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,3,0,35,371
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,23,19,84,175
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,67,0,185,394
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,134,40,166,164
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,148,92,257,299
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,165,39,200,143
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,216,39,244,104
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,238,153,339,355
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,251,25,281,120
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,280,30,304,92
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,0,303,41,321,83
image_000000003_jpg.rf.db8fd4730b031e35a60e0a60e17a0691,1,247,0,414,303
image_000000005_jpg.rf.78faf35ae818cfb31529481b1aa24717,0,90,322,111,377
image_000000005_jpg.rf.78faf35ae818cfb31529481b1aa24717,0,124,344,149,395
image_000000005_jpg.rf.78faf35ae818cfb31529481b1aa24717,0,208,330,229,409
image_000000005_jpg.rf.78faf35ae818cfb31529481b1aa24717,0,264,326,289,395
image_000000005_jpg.rf.78faf35ae818cfb31529481b1aa24717,0,290,331,304,392
image_000000005_jpg.rf.78faf35ae818cfb31529481b1aa24717,0,310,330,332,396
image_000000005_jpg.rf.78faf35ae818cfb31529481b1aa24717,0,316,337,332,392
image_000000005_jpg.rf.78faf35ae818cfb31529481b1aa24717,0,340,331,360,380
image_000000005_jpg.rf.78faf35ae818cfb31529481b1aa24717,0,353,342,366,375
image_000000005_jpg.rf.78faf35ae818cfb31529481b1aa24717,1,0,310,111,415
image_000000010_jpg.rf.bd4b622b644255e6c229402ae5a7b8a2,0,60,142,109,414
image_000000010_jpg.rf.bd4b622b644255e6c229402ae5a7b8a2,1,254,143,385,239
image_000000010_jpg.rf.bd4b622b644255e6c229402ae5a7b8a2,1,354,88,415,198
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,11,182,22,212
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,20,176,31,212
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,31,175,43,209
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,38,173,53,209
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,76,173,82,204
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,82,167,93,205
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,91,163,106,202
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,104,169,123,200
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,124,160,135,202
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,135,161,150,199
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,220,155,235,193
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,0,245,154,255,189
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,1,0,173,22,190
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,1,39,173,71,189
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,1,145,169,177,190
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,1,203,166,222,183
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,1,218,164,271,181
image_000000013_jpg.rf.596954d88b03ca3af5793b6fdcaa49eb,1,267,138,374,220
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,24,393,43,414
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,31,400,42,414
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,40,376,59,415
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,42,277,52,302
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,59,381,69,414
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,69,353,86,401
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,84,336,99,383
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,95,335,104,376
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,103,381,111,414
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,104,327,114,359
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,115,329,128,349
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,140,359,153,385
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,151,360,171,393
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,168,353,178,400
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,192,308,201,338
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,262,328,278,351
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,0,282,307,289,331
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,1,310,321,356,339
image_000000017_jpg.rf.607c0082ee6d37a3192c7a9966bffc69,1,342,331,415,378
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,0,163,26,166,41
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,0,168,20,173,41
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,0,180,23,184,47
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,0,259,24,282,104
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,0,281,17,299,80
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,0,385,32,415,149
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,1,17,17,32,39
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,1,81,23,197,97
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,1,186,26,206,45
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,1,234,27,253,46
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,1,251,30,265,39
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,1,297,34,315,49
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,1,330,36,386,81
image_000000019_jpg.rf.ccbdc59e6b469d5c0b0ef8bae0f9d62b,1,386,53,403,77
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,352,151,377,230
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,29,168,47,237
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,55,143,82,182
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,100,155,111,181
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,115,78,126,102
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,117,158,131,199
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,137,158,147,206
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,216,151,222,164
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,237,150,244,169
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,247,151,253,168
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,260,158,276,202
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,316,156,330,203
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,339,150,347,169
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,354,146,361,191
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,394,149,399,167
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,0,405,161,415,281
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,1,0,162,24,190
image_000000022_jpg.rf.7d5b8cf7ee87ad3a595adf8d1b35277d,1,378,156,401,168
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,0,205,270,225,330
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,0,237,271,254,336
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,1,140,319,165,354
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,1,166,330,175,348
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,1,173,332,183,350
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,1,179,327,197,352
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,1,186,317,192,322
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,1,193,330,202,351
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,1,193,326,199,330
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,1,202,317,205,326
image_000000024_jpg.rf.6a4bf4926a9ffe6f714f52909f148563,1,202,333,209,343
73 changes: 73 additions & 0 deletions detection-team/evaluation/compare_gpu_utilization_csvs.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"\n",
"# List of CSV file paths\n",
"file_paths = [\n",
" 'gpu_utilization_detectron2.csv',\n",
" 'gpu_utilization_ResNet.csv',\n",
" 'gpu_utilization_GroundingDINO.csv',\n",
" 'gpu_utilization_YOLO.csv'\n",
"]\n",
"\n",
"# Read the CSV files into DataFrames\n",
"dfs = [pd.read_csv(file_path) for file_path in file_paths]\n",
"\n",
"# Merge DataFrames on the 'timestamp' column\n",
"merged_df = pd.concat(dfs, keys=['Detectron2', 'ResNet', 'GroundingDINO', 'YOLO'], names=['File', 'Index'])\n",
"\n",
"# Reset index to have a flat DataFrame\n",
"merged_df = merged_df.reset_index(level=0)\n",
"\n",
"# Perform comparisons\n",
"# Example: Calculate the mean utilization for each file and compare\n",
"mean_utilization = merged_df.groupby('File').agg({\n",
" 'gpu_utilization': 'mean',\n",
" 'memory_utilization': 'mean',\n",
" 'encoder_utilization': 'mean',\n",
" 'decoder_utilization': 'mean'\n",
"})\n",
"\n",
"print(\"Mean Utilization Comparison:\")\n",
"print(mean_utilization)\n",
"\n",
"# Calculate the sum of utilization for each file and compare\n",
"sum_utilization = merged_df.groupby('File').agg({\n",
" 'gpu_utilization': 'sum',\n",
" 'memory_utilization': 'sum',\n",
" 'encoder_utilization': 'sum',\n",
" 'decoder_utilization': 'sum'\n",
"})\n",
"\n",
"print(\"\\nSum Utilization Comparison:\")\n",
"print(sum_utilization)\n",
"\n",
"# Example: Find differences in GPU utilization between files\n",
"comparison_df = merged_df.pivot(index='timestamp', columns='File', values='gpu_utilization')\n",
"comparison_df['Difference_Detectron2_ResNet'] = comparison_df['Detectron2'] - comparison_df['ResNet']\n",
"comparison_df['Difference_Detectron2_GroundingDINO'] = comparison_df['Detectron2'] - comparison_df['GroundingDINO']\n",
"comparison_df['Difference_Detectron2_YOLO'] = comparison_df['Detectron2'] - comparison_df['YOLO']\n",
"\n",
"print(\"\\nGPU Utilization Differences:\")\n",
"print(comparison_df)\n",
"\n",
"# Save the comparison results to CSV\n",
"mean_utilization.to_csv('mean_utilization_comparison.csv')\n",
"comparison_df.to_csv('gpu_utilization_differences.csv')\n"
]
}
],
"metadata": {
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
58 changes: 58 additions & 0 deletions detection-team/evaluation/create_data_csv.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
# Import necessary libraries
import os
import cv2
import pandas as pd
import warnings

# Suppress warnings
warnings.filterwarnings("ignore")

# Setup logger for Detectron2

# Global variables
Selected_Fold = 2 # 0..2 — NOTE(review): not referenced anywhere in this script; presumably selects a cross-validation fold — confirm before relying on it

# Function to get image dimensions
def get_image_dimensions(image_dir, image_name):
    """Return (width, height) in pixels of the .jpg image named *image_name*.

    Raises:
        FileNotFoundError: if the image cannot be read from *image_dir*.
    """
    full_path = os.path.join(image_dir, image_name + ".jpg")  # Assuming images are .jpg files
    loaded = cv2.imread(full_path)
    if loaded is None:
        raise FileNotFoundError(f"Image {full_path} not found.")
    h, w = loaded.shape[0], loaded.shape[1]
    return w, h

# Function to extract bounding boxes from labels
def extract_bounding_boxes(labels_dir, image_dir):
    """Convert YOLO-format label files into absolute pixel bounding boxes.

    Args:
        labels_dir: directory of .txt label files (one per image), each line
            "class x_center y_center width height" with normalized coordinates.
        image_dir: directory of matching .jpg images (used to get pixel sizes).

    Returns:
        List of [image_name, class_label, xmin, ymin, xmax, ymax] rows.
    """
    data = []
    for filename in os.listdir(labels_dir):
        if filename.endswith(".txt"):  # Assuming the bounding box files have .txt extension
            image_name = os.path.splitext(filename)[0]
            width, height = get_image_dimensions(image_dir, image_name)
            with open(os.path.join(labels_dir, filename), 'r') as file:
                for line in file:
                    parts = line.strip().split()
                    if len(parts) == 5:  # Assuming the format includes class label
                        class_label, x_center, y_center, bbox_width, bbox_height = map(float, parts)
                        # YOLO stores normalized center/size; convert to absolute corners.
                        xmin = int((x_center - bbox_width / 2) * width)
                        ymin = int((y_center - bbox_height / 2) * height)
                        xmax = int((x_center + bbox_width / 2) * width)
                        ymax = int((y_center + bbox_height / 2) * height)
                        data.append([image_name, int(class_label), xmin, ymin, xmax, ymax])
                    else:
                        # Bug fix: the f-string had no placeholder, so the offending
                        # file was never actually identified in the message.
                        print(f"Unexpected format in file: {filename}")
    return data

# Function to create CSV file from labels
def create_csv(labels_dir, image_dir, output_csv):
    """Extract all bounding boxes and write them to *output_csv*."""
    columns = ["image_name", "class_label", "xmin", "ymin", "xmax", "ymax"]
    rows = extract_bounding_boxes(labels_dir, image_dir)
    pd.DataFrame(rows, columns=columns).to_csv(output_csv, index=False)
    print(f"CSV file '{output_csv}' created successfully.")

# Generate the CSV file for the dataset when run as a script.
if __name__ == '__main__':
    path_to_data = "data_for_evaluation"
    labels_directory = f"{path_to_data}/labels"    # Path to labels directory
    image_directory = f"{path_to_data}/images"     # Path to images directory
    output_csv_file = "bounding_boxes_with_labels.csv"  # Output CSV file name
    create_csv(labels_directory, image_directory, output_csv_file)
4 changes: 0 additions & 4 deletions detection-team/evaluation/detectron2-evaluation.py

This file was deleted.

95 changes: 95 additions & 0 deletions detection-team/evaluation/detectron2-gpu-utilization-evaluation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
import os
import cv2
import torch
import pandas as pd
import time

from detectron2_for_evaluation.config import get_cfg
from detectron2_for_evaluation.engine.defaults import DefaultPredictor
from detectron2_for_evaluation import model_zoo

def setup_predictor():
    """Build a Detectron2 DefaultPredictor using the COCO Faster R-CNN R50-FPN 3x model."""
    model_config = "COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml"
    cfg = get_cfg()
    cfg.merge_from_file(model_zoo.get_config_file(model_config))
    cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(model_config)
    cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5  # confidence cutoff for reported detections
    # Check if CUDA (GPU) is available, else use CPU.
    cfg.MODEL.DEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'
    return DefaultPredictor(cfg)

def get_predictions(predictor, image_path):
    """Run *predictor* on the image at *image_path*; return its instances moved to CPU."""
    frame = cv2.imread(image_path)
    result = predictor(frame)
    return result["instances"].to("cpu")  # Ensure this is set to CPU

def load_ground_truth(csv_file):
    """Load ground-truth boxes from *csv_file*.

    Returns:
        Dict mapping image_name -> list of (xmin, ymin, xmax, ymax) int tuples,
        in the order the rows appear in the CSV.
    """
    frame = pd.read_csv(csv_file)
    boxes_by_image = {}
    for _, record in frame.iterrows():
        corners = (int(record["xmin"]), int(record["ymin"]),
                   int(record["xmax"]), int(record["ymax"]))
        boxes_by_image.setdefault(record["image_name"], []).append(corners)
    return boxes_by_image

def check_overlap(box1, box2):
    """Return True if two (xmin, ymin, xmax, ymax) boxes intersect.

    Uses strict inequalities, so boxes that merely touch at an edge or
    corner do not count as overlapping.
    """
    x1, y1, x2, y2 = box1
    x1_p, y1_p, x2_p, y2_p = box2
    # Boxes overlap iff their projections intersect on both axes.
    overlaps_x = x1 < x2_p and x2 > x1_p
    overlaps_y = y1 < y2_p and y2 > y1_p
    return overlaps_x and overlaps_y

def compute_accuracy(ground_truth, predictor, image_dir):
    """Fraction of ground-truth boxes matched by at least one overlapping prediction.

    This is a recall-style metric: each ground-truth box counts as detected if
    any predicted box overlaps it (via check_overlap); predictions that match
    nothing are not penalized.

    Args:
        ground_truth: dict image_name -> list of (xmin, ymin, xmax, ymax) boxes.
        predictor: detector passed through to get_predictions.
        image_dir: directory containing "<image_name>.jpg" files.

    Returns:
        Matched fraction in [0, 1]; 0 when there are no ground-truth boxes.
    """
    # Removed dead code: start/end timestamps and elapsed_time were computed
    # but never used or returned (the prints were already commented out).
    total_true_positive = 0
    total_ground_truth = 0

    for image_name, gt_boxes in ground_truth.items():
        image_path = os.path.join(image_dir, image_name + ".jpg")
        predictions = get_predictions(predictor, image_path)
        pred_boxes = predictions.pred_boxes.tensor.numpy()

        for gt_box in gt_boxes:
            # A ground-truth box counts as detected if any prediction overlaps it.
            if any(check_overlap(gt_box, tuple(pred_box)) for pred_box in pred_boxes):
                total_true_positive += 1
            total_ground_truth += 1

    return total_true_positive / total_ground_truth if total_ground_truth > 0 else 0

def get_accuracy_option1(path_to_data):
    """Evaluate the Detectron2 predictor against the ground-truth CSV.

    Args:
        path_to_data: dataset root; images are expected under "<path>/images".

    Returns:
        The overlap-based accuracy computed by compute_accuracy.
    """
    csv_file_path = "bounding_boxes_with_labels.csv"  # path to your CSV file
    image_directory = path_to_data + "/images"  # path to your images directory

    model = setup_predictor()
    gt_boxes = load_ground_truth(csv_file_path)
    return compute_accuracy(gt_boxes, model, image_directory)


def detectron2_evaluation(path_to_data):
    """Entry point: run the Detectron2 overlap-accuracy evaluation on *path_to_data*."""
    # make_coco_dataset(path_to_data)
    accuracy = get_accuracy_option1(path_to_data)  # (optionally also get_accuracy_option2)
    return accuracy
Binary file not shown.
Binary file not shown.
Binary file not shown.
Loading