Skip to content
This repository has been archived by the owner on Dec 19, 2024. It is now read-only.

Commit

Permalink
Spectral Analysis (#203)
Browse files Browse the repository at this point in the history
  • Loading branch information
Saurav-D authored Apr 7, 2022
1 parent 7c62d0c commit fe9bc1a
Show file tree
Hide file tree
Showing 40 changed files with 1,139 additions and 956 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ repos:
- id: check-added-large-files
- id: check-merge-conflict
- repo: https://github.com/psf/black
rev: 19.10b0
rev: 22.3.0
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
Expand Down
4 changes: 3 additions & 1 deletion datasetinsights/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,9 @@


@click.command(
cls=Entrypoint, help="Dataset Insights.", context_settings=CONTEXT_SETTINGS,
cls=Entrypoint,
help="Dataset Insights.",
context_settings=CONTEXT_SETTINGS,
)
@click.option(
"-v",
Expand Down
3 changes: 1 addition & 2 deletions datasetinsights/commands/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,7 @@


class Entrypoint(click.MultiCommand):
""" Click MultiCommand Entrypoint For Datasetinsights CLI
"""
"""Click MultiCommand Entrypoint For Datasetinsights CLI"""

def list_commands(self, ctx):
"""Dynamically get the list of commands."""
Expand Down
3 changes: 1 addition & 2 deletions datasetinsights/commands/convert.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,7 @@
),
)
def cli(input, output, format):
"""Convert dataset from Perception format to target format.
"""
"""Convert dataset from Perception format to target format."""
ctx = click.get_current_context()
logger.debug(f"Called convert command with parameters: {ctx.params}")

Expand Down
13 changes: 9 additions & 4 deletions datasetinsights/commands/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,7 @@ class SourceURI(click.ParamType):
PREFIX_PATTERN = r"^gs://|^http(s)?://|^usim://"

def convert(self, value, param, ctx):
""" Validate source URI and Converts the value.
"""
"""Validate source URI and Converts the value."""
match = re.search(self.PREFIX_PATTERN, value)
if not match:
message = (
Expand All @@ -36,7 +35,9 @@ def convert(self, value, param, ctx):
return value


@click.command(context_settings=const.CONTEXT_SETTINGS,)
@click.command(
context_settings=const.CONTEXT_SETTINGS,
)
@click.option(
"-s",
"--source-uri",
Expand Down Expand Up @@ -82,7 +83,11 @@ def convert(self, value, param, ctx):
"integrity of the downloaded dataset.",
)
def cli(
source_uri, output, include_binary, access_token, checksum_file,
source_uri,
output,
include_binary,
access_token,
checksum_file,
):
"""Download datasets to localhost from known locations.
Expand Down
11 changes: 6 additions & 5 deletions datasetinsights/dashboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@


def main_layout():
""" Method for generating main app layout.
"""Method for generating main app layout.
Returns:
html layout: main layout design with tabs for overview statistics
Expand All @@ -39,7 +39,8 @@ def main_layout():
value="dataset_overview",
children=[
dcc.Tab(
label="Overview", value="dataset_overview",
label="Overview",
value="dataset_overview",
),
dcc.Tab(
label="Object Detection",
Expand Down Expand Up @@ -68,7 +69,7 @@ def main_layout():
Output("data_root_value", "children"), [Input("dropdown", "value")]
)
def store_data_root(value):
""" Method for storing data-root value in a hidden division.
"""Method for storing data-root value in a hidden division.
Returns:
json : data-root encoded in json to be stored in data_root_value div.
Expand All @@ -83,7 +84,7 @@ def store_data_root(value):
[Input("page_tabs", "value"), Input("data_root_value", "children")],
)
def render_content(value, json_data_root):
""" Method for rendering dashboard layout based
"""Method for rendering dashboard layout based
on the selected tab value.
Args:
Expand All @@ -102,7 +103,7 @@ def render_content(value, json_data_root):


def check_path(path):
""" Method for checking if the given data-root path is valid or not."""
"""Method for checking if the given data-root path is valid or not."""
if os.path.isdir(path):
return path
else:
Expand Down
2 changes: 1 addition & 1 deletion datasetinsights/datasets/exceptions.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
class DatasetNotFoundError(Exception):
""" Raise when a dataset file can't be found."""
"""Raise when a dataset file can't be found."""
2 changes: 1 addition & 1 deletion datasetinsights/datasets/synthetic.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@


def read_bounding_box_3d(annotation, label_mappings=None):
""" Convert dictionary representations of 3d bounding boxes into objects
"""Convert dictionary representations of 3d bounding boxes into objects
of the BBox3d class
Args:
Expand Down
3 changes: 1 addition & 2 deletions datasetinsights/datasets/transformers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,7 @@ def get_dataset_transformer(format, **kwargs):


class DatasetTransformer(ABC):
""" Base class for all dataset transformer.
"""
"""Base class for all dataset transformer."""

REGISTRY = {}

Expand Down
2 changes: 1 addition & 1 deletion datasetinsights/datasets/unity_perception/captures.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ class Captures:
FILE_PATTERN = DATASET_TABLES[TABLE_NAME].file

def __init__(self, data_root=DEFAULT_DATA_ROOT, version=SCHEMA_VERSION):
""" Initialize Captures
"""Initialize Captures
Args:
data_root (str): the root directory of the dataset
Expand Down
3 changes: 1 addition & 2 deletions datasetinsights/datasets/unity_perception/exceptions.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,2 @@
class DefinitionIDError(Exception):
""" Raise when a given definition id can't be found.
"""
"""Raise when a given definition id can't be found."""
7 changes: 3 additions & 4 deletions datasetinsights/datasets/unity_perception/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ class Metrics:
FILE_PATTERN = DATASET_TABLES[TABLE_NAME].file

def __init__(self, data_root=DEFAULT_DATA_ROOT, version=SCHEMA_VERSION):
""" Initialize Metrics
"""Initialize Metrics
Args:
data_root (str): the root directory of the dataset containing
Expand Down Expand Up @@ -73,7 +73,7 @@ def _load_metrics(self, data_root, version):

@staticmethod
def _normalize_values(metric):
""" Filter unnecessary info from metric.
"""Filter unnecessary info from metric.
1-level flatten of metrics.values column.
"""
values = metric["values"]
Expand Down Expand Up @@ -115,8 +115,7 @@ def filter_metrics(self, def_id):

@staticmethod
def _load_json(filename, table_name, version):
"""Load records from json files into a dict
"""
"""Load records from json files into a dict"""
with open(filename, "r", encoding="utf8") as file:
data = json.load(file)
verify_version(data, version)
Expand Down
26 changes: 13 additions & 13 deletions datasetinsights/datasets/unity_perception/references.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ class AnnotationDefinitions:
FILE_PATTERN = DATASET_TABLES[TABLE_NAME].file

def __init__(self, data_root, version=SCHEMA_VERSION):
""" Initialize AnnotationDefinitions
"""Initialize AnnotationDefinitions
Args:
data_root (str): the root directory of the dataset containing
Expand Down Expand Up @@ -85,7 +85,7 @@ def get_definition(self, def_id):
return definition

def find_by_name(self, pattern):
""" Get the annotation definition by matching patterns
"""Get the annotation definition by matching patterns
This method will try to match the pattern of the annotation definition
by name to determine
Expand Down Expand Up @@ -136,7 +136,7 @@ class MetricDefinitions:
FILE_PATTERN = DATASET_TABLES[TABLE_NAME].file

def __init__(self, data_root, version=SCHEMA_VERSION):
""" Initialize MetricDefinitions
"""Initialize MetricDefinitions
Args:
data_root (str): the root directory of the dataset containing
tables
Expand All @@ -147,17 +147,17 @@ def __init__(self, data_root, version=SCHEMA_VERSION):
def load_metric_definitions(self, data_root, version):
"""Load metric definition files.
:ref:`metric_definitions.json`
:ref:`metric_definitions.json`
Args:
data_root (str): the root directory of the dataset containing tables
version (str): desired schema version
Args:
data_root (str): the root dir of the dataset containing tables
version (str): desired schema version
Returns:
A Pandas dataframe with metric definition records.
a collection of metric_definitions records with columns: id
(id for metric definition), name, description, spec (definition specific
spec)
Returns:
A Pandas dataframe with metric definition records.
a collection of metric_definitions records with columns: id
(id for metric definition), name, description, spec (definition specific
spec)
"""
definitions = []
for def_file in glob(data_root, self.FILE_PATTERN):
Expand Down Expand Up @@ -255,7 +255,7 @@ class Sensors:
FILE_PATTERN = DATASET_TABLES[TABLE_NAME].file

def __init__(self, data_root, version=SCHEMA_VERSION):
""" Initialize Sensors
"""Initialize Sensors
Args:
data_root (str): the root directory of the dataset containing
Expand Down
8 changes: 3 additions & 5 deletions datasetinsights/datasets/unity_perception/validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,13 @@ class VersionError(Exception):


class DuplicateRecordError(Exception):
""" Raise when the definition file has duplicate definition id
"""
"""Raise when the definition file has duplicate definition id"""

pass


class NoRecordError(Exception):
""" Raise when no record is found matching a given definition id
"""
"""Raise when no record is found matching a given definition id"""

pass

Expand All @@ -39,7 +37,7 @@ def verify_version(json_data, version):


def check_duplicate_records(table, column, table_name):
""" Check if table has duplicate records for a given column
"""Check if table has duplicate records for a given column
Args:
table (pd.DataFrame): a pandas dataframe
Expand Down
2 changes: 1 addition & 1 deletion datasetinsights/io/bbox.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ class BBox2D:
"""

def __init__(self, label, x, y, w, h, score=1.0):
""" Initialize 2D bounding box object
"""Initialize 2D bounding box object
Args:
label (str): string representation of the label
Expand Down
30 changes: 14 additions & 16 deletions datasetinsights/io/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def download_file(source_uri: str, dest_path: str, file_name: str = None):


def checksum_matches(filepath, expected_checksum, algorithm="CRC32"):
""" Check if the checksum matches
"""Check if the checksum matches
Args:
filepath (str): the downloaded file path
Expand All @@ -86,7 +86,7 @@ def checksum_matches(filepath, expected_checksum, algorithm="CRC32"):


def validate_checksum(filepath, expected_checksum, algorithm="CRC32"):
""" Validate checksum of the downloaded file.
"""Validate checksum of the downloaded file.
Args:
filepath (str): the downloaded file path
Expand All @@ -101,7 +101,7 @@ def validate_checksum(filepath, expected_checksum, algorithm="CRC32"):


def compute_checksum(filepath, algorithm="CRC32"):
""" Compute the checksum of a file.
"""Compute the checksum of a file.
Args:
filepath (str): the downloaded file path
Expand All @@ -121,17 +121,15 @@ def compute_checksum(filepath, algorithm="CRC32"):


def _crc32_checksum(filepath):
""" Calculate the checksum of a file using CRC32.
"""
"""Calculate the checksum of a file using CRC32."""
with open(filepath, "rb") as f:
checksum = zlib.crc32(f.read())

return checksum


def _md5_checksum(filename):
""" Calculate the checksum of a file using MD5.
"""
"""Calculate the checksum of a file using MD5."""
md5 = hashlib.md5()
with open(filename, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
Expand All @@ -140,7 +138,7 @@ def _md5_checksum(filename):


def get_checksum_from_file(filepath):
""" This method return checksum of the file whose filepath is given.
"""This method return checksum of the file whose filepath is given.
Args:
filepath (str): Path of the checksum file.
Expand Down Expand Up @@ -168,7 +166,7 @@ def get_checksum_from_file(filepath):


def _read_checksum_from_txt(filepath):
""" This method reads checksum from a txt file and returns it.
"""This method reads checksum from a txt file and returns it.
Args:
filepath (str): Local filepath of the checksum file.
Expand All @@ -190,14 +188,14 @@ def _parse_filename(response, uri):


def _get_filename_from_response(response):
""" Gets filename from requests response object
"""Gets filename from requests response object
Args:
response: requests.Response() object that contains the server's
response to the HTTP request.
Args:
response: requests.Response() object that contains the server's
response to the HTTP request.
Returns:
filename (str): Name of the file to be downloaded
Returns:
filename (str): Name of the file to be downloaded
"""
cd = response.headers.get("content-disposition")
if not cd:
Expand All @@ -209,7 +207,7 @@ def _get_filename_from_response(response):


def _get_file_name_from_uri(uri):
""" Gets filename from URI
"""Gets filename from URI
Args:
uri (str): URI
Expand Down
Loading

0 comments on commit fe9bc1a

Please sign in to comment.