Apm app #544

Status: Open. Wants to merge 23 commits into base: master.

Commits (23, changes shown from all commits):
36a121e  use pynxtools.nomad.schema.Root  (sanbrock, Jan 10, 2025)
c1bc7d5  not using inner sections  (sanbrock, Jan 17, 2025)
fc5b95b  fix for doc links if name contains _  (sanbrock, Jan 21, 2025)
bfce048  fix format  (sanbrock, Jan 21, 2025)
274e0d6  linting  (sanbrock, Jan 21, 2025)
27fa71c  fixing tests  (sanbrock, Jan 21, 2025)
8fb4953  fixing mime-type for WSL  (sanbrock, Jan 24, 2025)
bd22578  fix for handling raw files in subdirectories  (sanbrock, Jan 24, 2025)
eb07601  Version tracking feature branch to enable usage of specifically custo…  (atomprobe-tc, Jan 26, 2025)
53cf7e3  Updated definitions and modifications on pyproject to prevent the unc…  (atomprobe-tc, Jan 26, 2025)
e124ccd  Moved deps urllib3 and filelock to a nomad set of deps such that thes…  (atomprobe-tc, Jan 26, 2025)
018f834  NXapm updates  (atomprobe-tc, Jan 27, 2025)
8b0ea2a  Bumping ruff to 0.9.3  (atomprobe-tc, Jan 27, 2025)
8a213a4  Edits in reply to how cameca example get parsed in NOMAD  (atomprobe-tc, Jan 28, 2025)
f8b341e  edit defs  (atomprobe-tc, Jan 28, 2025)
12d2aad  Updated defs  (atomprobe-tc, Jan 28, 2025)
9ab43ba  Updated defs for event records  (atomprobe-tc, Jan 30, 2025)
55912f9  updated defs, instrument type  (atomprobe-tc, Jan 30, 2025)
fc1434c  Added an example based on @GinzburgLev and @sanbrock how to debug the…  (atomprobe-tc, Jan 30, 2025)
5fd21c4  Using the refactored NXapm that might break current pynxtools-apm parser  (atomprobe-tc, Jan 31, 2025)
77f9a9a  Fixes of NOMAD APM example  (atomprobe-tc, Feb 1, 2025)
7b117d2  Map NX_POSINT and NX_UINT to m_int64  (atomprobe-tc, Feb 1, 2025)
2768771  run_number a countable  (atomprobe-tc, Feb 1, 2025)
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -1,9 +1,9 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.8.3
rev: v0.9.3
hooks:
# Run the linter.
- id: ruff
# Run the formatter.
- id: ruff-format
- id: ruff-format
31 changes: 31 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,31 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "pynx-core debug",
"type": "python",
"request": "launch",
"cwd": "${workspaceFolder}",
"program": "../../.py3.11.11/bin/dataconverter",
"args": [//workflow files str.str, hits.hits, root.root should be created via touch str.str ...
//fancyfile.mimetype,
"--reader",
"apm",
"--nxdl",
"NXapm",
"--output=out.nxs"]
},
{
"name": "nomad parse",
"type": "python",
"request": "launch",
"cwd": "${workspaceFolder}",
"program": "../../.py3.11.11/bin/nomad",
"justMyCode": false,
"args": ["parse", "--show-archive", "../../pynx-apm/pynxtools_apm/tests/prod/ger_saarbruecken_pauly.073.0.nxs"]
},
]
}
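For reference, a minimal Python sketch of the two command lines these launch configurations wrap, run from a checkout where the dataconverter and nomad entry points are on the PATH; the out.nxs file name is an assumption, not part of the configuration.

import subprocess

# Equivalent of the "pynx-core debug" configuration: run the apm reader against
# the NXapm application definition and write the result to out.nxs.
subprocess.run(
    ["dataconverter", "--reader", "apm", "--nxdl", "NXapm", "--output=out.nxs"],
    check=True,
)

# Equivalent of the "nomad parse" configuration: parse a produced NeXus file and
# print the generated NOMAD archive (the input path here is a placeholder).
subprocess.run(["nomad", "parse", "--show-archive", "out.nxs"], check=True)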
60 changes: 30 additions & 30 deletions dev-requirements.txt
@@ -2,17 +2,17 @@
# uv pip compile --universal -p 3.11 --extra=dev --extra=docs --output-file=dev-requirements.txt pyproject.toml
anytree==2.12.1
# via pynxtools (pyproject.toml)
ase==3.23.0
ase==3.24.0
# via pynxtools (pyproject.toml)
babel==2.16.0
# via mkdocs-material
certifi==2024.8.30
certifi==2024.12.14
# via requests
cfgv==3.4.0
# via pre-commit
charset-normalizer==3.4.0
charset-normalizer==3.4.1
# via requests
click==8.1.7
click==8.1.8
# via
# pynxtools (pyproject.toml)
# click-default-group
@@ -28,15 +28,15 @@ colorama==0.4.6
# pytest
contourpy==1.3.1
# via matplotlib
coverage==7.6.9
coverage==7.6.10
# via pytest-cov
cycler==0.12.1
# via matplotlib
distlib==0.3.9
# via virtualenv
filelock==3.16.1
filelock==3.17.0
# via virtualenv
fonttools==4.55.2
fonttools==4.55.6
# via matplotlib
ghp-import==2.1.0
# via mkdocs
@@ -46,20 +46,20 @@ hjson==3.1.0
# via
# mkdocs-macros-plugin
# super-collections
identify==2.6.3
identify==2.6.6
# via pre-commit
idna==3.10
# via requests
importlib-metadata==8.5.0
importlib-metadata==8.6.1
# via pynxtools (pyproject.toml)
iniconfig==2.0.0
# via pytest
jinja2==3.1.4
jinja2==3.1.5
# via
# mkdocs
# mkdocs-macros-plugin
# mkdocs-material
kiwisolver==1.4.7
kiwisolver==1.4.8
# via matplotlib
lxml==5.3.0
# via pynxtools (pyproject.toml)
@@ -76,7 +76,7 @@ markupsafe==3.0.2
# via
# jinja2
# mkdocs
matplotlib==3.9.3
matplotlib==3.10.0
# via ase
mergedeep==1.3.4
# via
@@ -94,13 +94,13 @@ mkdocs-get-deps==0.2.0
# via mkdocs
mkdocs-macros-plugin==1.3.7
# via pynxtools (pyproject.toml)
mkdocs-material==9.5.48
mkdocs-material==9.5.50
# via pynxtools (pyproject.toml)
mkdocs-material-extensions==1.3.1
# via
# pynxtools (pyproject.toml)
# mkdocs-material
mypy==1.13.0
mypy==1.14.1
# via pynxtools (pyproject.toml)
mypy-extensions==1.0.0
# via mypy
@@ -133,21 +133,21 @@ pathspec==0.12.1
# via
# mkdocs
# mkdocs-macros-plugin
pillow==11.0.0
pillow==11.1.0
# via matplotlib
platformdirs==4.3.6
# via
# mkdocs-get-deps
# virtualenv
pluggy==1.5.0
# via pytest
pre-commit==4.0.1
pre-commit==4.1.0
# via pynxtools (pyproject.toml)
pygments==2.18.0
pygments==2.19.1
# via mkdocs-material
pymdown-extensions==10.12
pymdown-extensions==10.14.1
# via mkdocs-material
pyparsing==3.2.0
pyparsing==3.2.1
# via matplotlib
pytest==8.3.4
# via
@@ -181,43 +181,43 @@ regex==2024.11.6
# via mkdocs-material
requests==2.32.3
# via mkdocs-material
ruff==0.8.2
ruff==0.9.3
# via pynxtools (pyproject.toml)
scipy==1.14.1
scipy==1.15.1
# via ase
six==1.17.0
# via
# anytree
# python-dateutil
structlog==24.4.0
structlog==25.1.0
# via pynxtools (pyproject.toml)
super-collections==0.5.3
# via mkdocs-macros-plugin
termcolor==2.5.0
# via mkdocs-macros-plugin
tomli==2.2.1 ; python_full_version <= '3.11'
tomli==2.2.1 ; python_full_version == '3.11'
# via coverage
types-pytz==2024.2.0.20241003
types-pytz==2024.2.0.20241221
# via pynxtools (pyproject.toml)
types-pyyaml==6.0.12.20240917
types-pyyaml==6.0.12.20241230
# via pynxtools (pyproject.toml)
types-requests==2.32.0.20241016
# via pynxtools (pyproject.toml)
typing-extensions==4.12.2
# via mypy
tzdata==2024.2
tzdata==2025.1
# via pandas
urllib3==2.2.3
urllib3==2.3.0
# via
# requests
# types-requests
uv==0.5.7
uv==0.5.24
# via pynxtools (pyproject.toml)
virtualenv==20.28.0
virtualenv==20.29.1
# via pre-commit
watchdog==6.0.0
# via mkdocs
xarray==2024.11.0
xarray==2025.1.1
# via pynxtools (pyproject.toml)
zipp==3.21.0
# via importlib-metadata
5 changes: 5 additions & 0 deletions pyproject.toml
@@ -63,6 +63,11 @@ dev = [
"uv",
"pre-commit",
]
nomad = [
"urllib3<=1.26.20",
"filelock==3.3.1"
]

convert = [
"pynxtools[apm,ellips,em,igor,mpes,raman,stm,xps,xrd]",
]
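As a quick sanity check, a minimal sketch (assuming pynxtools was installed with the new extra, e.g. pip install "pynxtools[nomad]") of confirming that the pins from the nomad extra were actually resolved in the environment:

from importlib.metadata import version

# Versions constrained by the "nomad" extra added above.
print(version("urllib3"))   # expected <= 1.26.20
print(version("filelock"))  # expected 3.3.1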
8 changes: 3 additions & 5 deletions src/pynxtools/dataconverter/helpers.py
@@ -80,13 +80,11 @@ def _log(self, path: str, log_type: ValidationProblem, value: Optional[Any], *ar

if log_type == ValidationProblem.UnitWithoutDocumentation:
logger.warning(
f"The unit, {path} = {value}, "
"is being written but has no documentation"
f"The unit, {path} = {value}, is being written but has no documentation"
)
elif log_type == ValidationProblem.InvalidEnum:
logger.warning(
f"The value at {path} should be on of the "
f"following strings: {value}"
f"The value at {path} should be on of the following strings: {value}"
)
elif log_type == ValidationProblem.MissingRequiredGroup:
logger.warning(f"The required group, {path}, hasn't been supplied.")
@@ -344,7 +342,7 @@ def get_all_defined_required_children_for_elem(xml_element):
list_of_children_to_add.add(f"{name_to_add}/@units")
elif tag == "group":
nxdlpath = (
f'{xml_element.get("nxdlpath")}/{get_nxdl_name_from_elem(child)}'
f"{xml_element.get('nxdlpath')}/{get_nxdl_name_from_elem(child)}"
)
nxdlbase = xml_element.get("nxdlbase")
nx_name = nxdlbase[nxdlbase.rfind("/") + 1 : nxdlbase.rfind(".nxdl")]
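To make the slicing above concrete, a small worked example of what the rfind arithmetic extracts from a typical nxdlbase value (the path below is hypothetical):

# The slice keeps everything between the last "/" and the ".nxdl" suffix,
# i.e. the application definition's class name.
nxdlbase = "/defs/applications/NXapm.nxdl.xml"  # hypothetical path
nx_name = nxdlbase[nxdlbase.rfind("/") + 1 : nxdlbase.rfind(".nxdl")]
print(nx_name)  # -> "NXapm"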
18 changes: 6 additions & 12 deletions src/pynxtools/dataconverter/readers/example/reader.py
@@ -106,22 +106,16 @@ def read(

# virtual datasets slicing
my_path = str(f"{os.path.dirname(__file__)}/../../../data/")
template[("/ENTRY[entry]" "/test_virtual" "_dataset/sliced" "_dataset")] = {
"link": (
f"{my_path}/xarray_saved_small_" "calibration.h5:/binned/BinnedData"
),
template[("/ENTRY[entry]/test_virtual_dataset/sliced_dataset")] = {
"link": (f"{my_path}/xarray_saved_small_calibration.h5:/binned/BinnedData"),
"shape": np.index_exp[:, 1, :, :],
}
template[("/ENTRY[entry]" "/test_virtual" "_dataset/slic" "ed_dataset2")] = {
"link": (
f"{my_path}/xarray_saved_small" "_calibration.h5:/binned/BinnedData"
),
template[("/ENTRY[entry]/test_virtual_dataset/sliced_dataset2")] = {
"link": (f"{my_path}/xarray_saved_small_calibration.h5:/binned/BinnedData"),
"shape": np.index_exp[:, :, :, 1],
}
template[("/ENTRY[entry]" "/test_virtual" "_dataset/slic" "ed_dataset3")] = {
"link": (
f"{my_path}/xarray_saved_small" "_calibration.h5:/binned/BinnedData"
),
template[("/ENTRY[entry]/test_virtual_dataset/sliced_dataset3")] = {
"link": (f"{my_path}/xarray_saved_small_calibration.h5:/binned/BinnedData"),
"shape": np.index_exp[:, :, :, 2:4],
}
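The template entries above request sliced links into the bundled calibration file; below is a minimal sketch (assuming h5py and an out.nxs written from this template) of inspecting one of the resulting datasets. The output file name and the mapping of /ENTRY[entry] to an HDF5 group named "entry" are assumptions of this sketch.

import h5py

with h5py.File("out.nxs", "r") as f:
    ds = f["entry/test_virtual_dataset/sliced_dataset"]
    # BinnedData with its second axis fixed by np.index_exp[:, 1, :, :]
    print(ds.shape)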

2 changes: 1 addition & 1 deletion src/pynxtools/dataconverter/readers/json_map/reader.py
@@ -56,7 +56,7 @@ def get_val_nested_keystring_from_dict(keystring, data):
return data[current_key].values
if isinstance(data[current_key], xarray.core.dataset.Dataset):
raise NotImplementedError(
"Xarray datasets are not supported. " "You can only use xarray dataarrays."
"Xarray datasets are not supported. You can only use xarray dataarrays."
)

return data[current_key]
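A small sketch (assuming xarray is installed) of the distinction the reader enforces here: DataArrays are unwrapped to their numpy values, whole Datasets are rejected.

import xarray as xr

da = xr.DataArray([1.0, 2.0, 3.0], dims=["x"])
print(da.values)  # a plain numpy array, which is what the reader returns

ds = xr.Dataset({"signal": da})
# Mapping a key onto ds instead of da would raise the NotImplementedError above.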
2 changes: 1 addition & 1 deletion src/pynxtools/definitions
Submodule definitions updated 195 files
4 changes: 2 additions & 2 deletions src/pynxtools/nexus/nexus.py
@@ -439,7 +439,7 @@ def process_node(hdf_node, hdf_path, parser, logger, doc=True):
if len(hdf_node.shape) <= 1
else str(decode_if_string(hdf_node[0])).split("\n")
)
logger.debug(f'value: {val[0]} {"..." if len(val) > 1 else ""}')
logger.debug(f"value: {val[0]} {'...' if len(val) > 1 else ''}")
else:
logger.debug(
f"===== GROUP (/{hdf_path} "
@@ -460,7 +460,7 @@ def process_node(hdf_node, hdf_path, parser, logger, doc=True):
for key, value in hdf_node.attrs.items():
logger.debug(f"===== ATTRS (/{hdf_path}@{key})")
val = str(decode_if_string(value)).split("\n")
logger.debug(f'value: {val[0]} {"..." if len(val) > 1 else ""}')
logger.debug(f"value: {val[0]} {'...' if len(val) > 1 else ''}")
(req_str, nxdef, nxdl_path) = get_nxdl_doc(hdf_info, logger, doc, attr=key)
if (
parser is not None
24 changes: 17 additions & 7 deletions src/pynxtools/nomad/entrypoints.py
@@ -63,7 +63,7 @@ def load(self):
name="pynxtools parser",
description="A parser for nexus files.",
mainfile_name_re=r".*\.nxs",
mainfile_mime_re="application/x-hdf5",
mainfile_mime_re="application/x-hdf*",
)

from nomad.config.models.ui import (
@@ -76,7 +76,7 @@ def load(self):
SearchQuantities,
)

schema = "pynxtools.nomad.schema.NeXus"
schema = "pynxtools.nomad.schema.Root"

nexus_app = AppEntryPoint(
name="NexusApp",
@@ -105,17 +105,17 @@
Column(quantity=f"entry_type", selected=True),
Column(
title="definition",
quantity=f"data.*.ENTRY[*].definition__field#{schema}",
quantity=f"data.ENTRY[*].definition__field#{schema}",
selected=True,
),
Column(
title="start_time",
quantity=f"data.*.ENTRY[*].start_time__field#{schema}",
quantity=f"data.ENTRY[*].start_time__field#{schema}",
selected=True,
),
Column(
title="title",
quantity=f"data.*.ENTRY[*].title__field#{schema}",
quantity=f"data.ENTRY[*].title__field#{schema}",
selected=True,
),
],
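For orientation, a minimal sketch (assuming a NOMAD installation providing nomad.config.models.ui) of how one of the updated columns is built; the quantity path now starts at data.ENTRY because the archive data sits directly under the plugin's Root section instead of per-definition subsections.

from nomad.config.models.ui import Column

schema = "pynxtools.nomad.schema.Root"
definition_column = Column(
    title="definition",
    quantity=f"data.ENTRY[*].definition__field#{schema}",
    selected=True,
)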
@@ -161,8 +161,8 @@
"autorange": True,
"nbins": 30,
"scale": "linear",
"quantity": f"data.Root.datetime#{schema}",
"title": "Procesing Time",
"quantity": f"data.ENTRY.start_time__field#{schema}",
"title": "Start Time",
"layout": {
"lg": {"minH": 3, "minW": 3, "h": 4, "w": 12, "y": 0, "x": 0}
},
@@ -177,6 +177,16 @@
"lg": {"minH": 3, "minW": 3, "h": 8, "w": 4, "y": 0, "x": 12}
},
},
{
"type": "terms",
"show_input": False,
"scale": "linear",
"quantity": f"data.ENTRY.definition__field#{schema}",
"title": "Definition",
"layout": {
"lg": {"minH": 3, "minW": 3, "h": 8, "w": 4, "y": 0, "x": 16}
},
},
{
"type": "periodic_table",
"scale": "linear",