diff --git a/.github/workflows/on-push.yml b/.github/workflows/on-push.yml
index a73b356f..13d7bf7c 100644
--- a/.github/workflows/on-push.yml
+++ b/.github/workflows/on-push.yml
@@ -4,6 +4,7 @@ on:
push:
branches:
- master
+ - 185-docs
tags:
- '*'
pull_request:
@@ -110,25 +111,23 @@ jobs:
steps:
- uses: actions/checkout@v4
- - name: Download combined environments
- uses: actions/download-artifact@v4
+ - name: Configure Git Credentials
+ run: |
+ git config user.name github-actions[bot]
+ git config user.email 41898282+github-actions[bot]@users.noreply.github.com
+ - uses: actions/setup-python@v5
with:
- name: combined-environments
- path: ci
- - name: Install Conda environment with Micromamba
- uses: mamba-org/setup-micromamba@v1
+ python-version: 3.x
+ - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
+ - uses: actions/cache@v4
with:
- environment-file: ci/combined-environment-ci.yml
- environment-name: DEVELOP
- cache-environment: true
- create-args: >-
- python=3.9
- - name: Install package
- run: |
- python -m pip install --no-deps -e .
- - name: Build documentation
- run: |
- make docs-build
+ key: mkdocs-material-${{ env.cache_id }}
+ path: .cache
+ restore-keys: |
+ mkdocs-material-
+ - run: pip install mkdocs-material mkdocs-glightbox
+ - name: Deploy documentation
+ run: mkdocs gh-deploy --force
integration-tests:
needs: [combine-environments, unit-tests]
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2065774a..81635abc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,6 +6,7 @@ repos:
- id: end-of-file-fixer
- id: check-json
- id: check-yaml
+ args: ['--unsafe']
- id: check-toml
- id: debug-statements
- id: mixed-line-ending
@@ -24,9 +25,11 @@ repos:
- id: ruff
args: [--fix, --show-fixes]
- repo: https://github.com/executablebooks/mdformat
- rev: 0.7.16
+ rev: 0.7.19
hooks:
- id: mdformat
+ additional_dependencies:
+ - mdformat-mkdocs[recommended]>=4.0.0
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.9.0
hooks:
diff --git a/Makefile b/Makefile
index 51be2b9f..d10b6e75 100644
--- a/Makefile
+++ b/Makefile
@@ -28,6 +28,6 @@ template-update:
pre-commit run --all-files cruft -c .pre-commit-config-cruft.yaml
docs-build:
- cd docs && rm -fr _api && make clean && make html
+ cd docs && bash make_docs.sh
# DO NOT EDIT ABOVE THIS LINE, ADD COMMANDS BELOW
diff --git a/README.md b/README.md
index 5507c7fa..8b0c188e 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,8 @@
# pyBDY
-[pyBDY documentation](http://pynemo.readthedocs.io/en/latest/index.html). To be updated soon.
+
+
+[pyBDY documentation](https://noc-msm.github.io/pyBDY/).
## How do I get set up?
@@ -8,61 +10,61 @@ These are the steps to take to install pyBDY:
- Clone pyBDY repository:
- ```
- export PYBDY_DIR=$PWD/pyBDY
- git clone https://github.com/NOC-MSM/pyBDY.git
- ```
+ ```
+ export PYBDY_DIR=$PWD/pyBDY
+ git clone https://github.com/NOC-MSM/pyBDY.git
+ ```
- Creating a specific conda virtual environment is highly recommended ([click here for more about virtual
- enviroments](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html)).
- Use the latest version of anaconda (to be added in your .bashrc or load the module in the command line, e.g ` module load anaconda/5-2021`).
+ environments](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html)).
+ Use the latest version of anaconda (to be added in your .bashrc or load the module in the command line, e.g. `module load anaconda/5-2021`).
- ```
- cd $PYBDY_DIR
- conda env create -n pybdy -f environment.yml python=3.9
- ```
+ ```
+ cd $PYBDY_DIR
+ conda env create -n pybdy -f environment.yml python=3.9
+ ```
- Activate the new virtual environment:
- ```
- conda activate pybdy
- ```
+ ```
+ conda activate pybdy
+ ```
- To deactivate (not now!):
- ```
- conda deactivate
- ```
+ ```
+ conda deactivate
+ ```
- Make sure the Java Runtime Environment is set e.g.:
- ```
- export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.372.b07-1.el7_9.x86_64/ # e.g. for livljobs\*
- ```
+ ```
+ export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.372.b07-1.el7_9.x86_64/ # e.g. for livljobs\*
+ ```
- Or (downloading from https://jdk.java.net/20/)
+ Or (downloading from https://jdk.java.net/20/)
- ```
- export JAVA_HOME=/Users//Downloads/jdk-20.0.1.jdk/Contents/Home/ # e.g. for mac OSX
- ```
+ ```
+ export JAVA_HOME=/Users/<username>/Downloads/jdk-20.0.1.jdk/Contents/Home/ # e.g. for mac OSX
+ ```
- Generalised methods for defining paths are as follows:
+ Generalised methods for defining paths are as follows:
- ```
- export JAVA_HOME=$(readlink -f $(which java)) # UNIX
- export JAVA_HOME=$(/usr/libexec/java_home) # Mac
+ ```
+ export JAVA_HOME=$(readlink -f $(which java)) # UNIX
+ export JAVA_HOME=$(/usr/libexec/java_home) # Mac
- ```
+ ```
- NB the above may not land at the correct directory level, but should find
- the correct root. PyBDY expects this to be the directory level with `lib`
- in which might be e.g. 3 directories back.
+ NB the above may not land at the correct directory level, but should find
+ the correct root. PyBDY expects this to be the directory level containing
+ `lib`, which might be e.g. 3 directories back.
- Install pyBDY:
- ```
- pip install -e .
- ```
+ ```
+ pip install -e .
+ ```
This should result in pyBDY being installed in the virtual environment,
and can be checked by entering:
@@ -109,57 +111,57 @@ The
following steps are required,
- Run pyBDY using the namelist file in the inputs folder
- (namelist_local.bdy) from inside the root pyBDY directory, e.g.:
+ (namelist_local.bdy) from inside the root pyBDY directory, e.g.:
- ```
- cd $PYBDY_DIR
- mkdir -p outputs
- pybdy -s inputs/namelist_local.bdy
- ```
+ ```
+ cd $PYBDY_DIR
+ mkdir -p outputs
+ pybdy -s inputs/namelist_local.bdy
+ ```
- This will create two output files `coordinates.bdy.nc` and
- `NNA_R12_bdyT_y1979_m11.nc` in an `./outputs` folder
+ `NNA_R12_bdyT_y1979_m11.nc` in an `./outputs` folder
- To check the coordinates.bdy.nc has the correct boundary points, the
- script `plotting/plot_coords.py` will plot the domain boundaries and show
- the different locations of the rim width (increasing number should
- go inwards).
+ script `plotting/plot_coords.py` will plot the domain boundaries and show
+ the different locations of the rim width (increasing number should
+ go inwards).
- E.g.
- `python plotting/plot_coords.py outputs/NNA_R12_bdyT_y1979m11.nc outputs/coordinates.bdy.nc`
- 
+ E.g.
+ `python plotting/plot_coords.py outputs/NNA_R12_bdyT_y1979m11.nc outputs/coordinates.bdy.nc`
+ 
- The other script `plot_bdy.py` plots extracted variables at the boundaries to help visualise the output (1D or 2D).
- E.g.
- `python plotting/plot_bdy.py outputs/NNA_R12_bdyT_y1979m11.nc votemper`
- 
+ The other script `plot_bdy.py` plots extracted variables at the boundaries to help visualise the output (1D or 2D).
+ E.g.
+ `python plotting/plot_bdy.py outputs/NNA_R12_bdyT_y1979m11.nc votemper`
+ 
- which also works on 2D tidal boundary data (note you can specify an output file name):
- `python plotting/plot_bdy.py outputs/NNA_R12_bdytide_TPXO7p2_M2_grd_Z.nc z1 example_bdy_1d_data.png`
- 
+ which also works on 2D tidal boundary data (note you can specify an output file name):
+ `python plotting/plot_bdy.py outputs/NNA_R12_bdytide_TPXO7p2_M2_grd_Z.nc z1 example_bdy_1d_data.png`
+ 
## Example: generating tidal boundary conditions on ARCHER2
- Activate the new virtual environment:
- ```
- conda activate pybdy
- ```
+ ```
+ conda activate pybdy
+ ```
- Make sure all the directories and files are in place:
- ```
- cd pyBDY
- mkdir outputs
- ln -s /work/n01/n01/shared/jelt/FES2014 inputs/.
-
- ```
+ ```
+ cd pyBDY
+ mkdir outputs
+ ln -s /work/n01/n01/shared/jelt/FES2014 inputs/.
+
+ ```
- Press go:
- ```
- pybdy -s inputs/namelist_local.bdy
- ```
+ ```
+ pybdy -s inputs/namelist_local.bdy
+ ```
Takes about 120s. Generates 7 constituents, using FES2014 data, written
to \`outputs\`:
diff --git a/contribution_guidelines.md b/contribution_guidelines.md
index 8c89739f..5400ab7e 100644
--- a/contribution_guidelines.md
+++ b/contribution_guidelines.md
@@ -24,7 +24,7 @@ If you don't have write permissions on [NOC-MSM/pyBDY](https://github.com/NOC-MS
1. Update the master branch of your local repository: `git checkout master && git pull`
1. Open a GitHub issue or pick one already opened
-1. Create a branch that references the issue number and gives a summary of the changes that will be made: `git branch issue-103-remove-pynemo-traces`
+1. Create a branch that references the issue number and gives a summary of the changes that will be made: `git branch issue-103-remove-pynemo-traces`
1. Switch to that branch (i.e., update HEAD to set that branch as the current branch): `git checkout issue-103-remove-pynemo-traces`
## Stage and commit changes
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index 92dd33a1..00000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line, and also
-# from the environment for the first two.
-SPHINXOPTS ?=
-SPHINXBUILD ?= sphinx-build
-SOURCEDIR = source
-BUILDDIR = _build
-
-# Put it first so that "make" without argument is like "make help".
-help:
- @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/assets/icons/favicon.ico b/docs/assets/icons/favicon.ico
new file mode 100644
index 00000000..af432daa
Binary files /dev/null and b/docs/assets/icons/favicon.ico differ
diff --git a/docs/assets/icons/noc_logo.png b/docs/assets/icons/noc_logo.png
new file mode 100644
index 00000000..1ee9f207
Binary files /dev/null and b/docs/assets/icons/noc_logo.png differ
diff --git a/docs/assets/icons/pybdy_logo.png b/docs/assets/icons/pybdy_logo.png
new file mode 100644
index 00000000..6e4f0f18
Binary files /dev/null and b/docs/assets/icons/pybdy_logo.png differ
diff --git a/docs/assets/icons/pybdy_logo_small.png b/docs/assets/icons/pybdy_logo_small.png
new file mode 100644
index 00000000..651d750d
Binary files /dev/null and b/docs/assets/icons/pybdy_logo_small.png differ
diff --git a/docs/assets/images/EN4_sst_climatology_mean.png b/docs/assets/images/EN4_sst_climatology_mean.png
new file mode 100644
index 00000000..4633be65
Binary files /dev/null and b/docs/assets/images/EN4_sst_climatology_mean.png differ
diff --git a/docs/format_docs.py b/docs/format_docs.py
new file mode 100644
index 00000000..3fe156c6
--- /dev/null
+++ b/docs/format_docs.py
@@ -0,0 +1,60 @@
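+# Post-process the Sphinx-generated markdown files for mkdocs.
+# Run by make_docs.sh after `make markdown` has produced the .md files.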
+import glob
+
+markdown_files = glob.glob("./*.md")
+index_files = glob.glob("./index*.md")
+
+markdown_mods = list(set(markdown_files) - set(index_files))
+
+for i in range(len(markdown_mods)):
+ with open(markdown_mods[i], "r") as f:
+ data = f.readlines()
+
+ skip_ind = []
+ for j in range(len(data)):
+ # Swap args for parameters and remove colons
+ if "Arg" in data[j]:
+ data[j] = data[j].replace("s", "").replace("Arg", "Parameters")
+ if (
+ ("Parameters" in data[j]) | ("Returns" in data[j]) | ("Notes" in data[j])
+ ) & (":" in data[j]):
+ data[j] = data[j].replace(":", "")
+
+ # Upgrade some headings
+ if ("# " in data[j]) & ("package" not in data[j]) & ("##" not in data[j]):
+ data[j] = data[j].replace("# ", "")
+ if (" module" in data[j]) & ("## " in data[j]):
+ data[j] = data[j].replace("## ", "# ")
+ if (
+ ("### " in data[j])
+ & ("Parameters" not in data[j])
+ & ("Returns" not in data[j])
+ & ("Notes" not in data[j])
+ ):
+ data[j] = data[j].replace("### ", "## ")
+ data[j] = data[j].replace("#### ", "### ")
+ if ("Parameters" in data[j]) | ("Returns" in data[j]):
+ data[j] = "> " + data[j]
+ if (
+ ("### " in data[j])
+ & ("Parameters" not in data[j])
+ & ("Returns" not in data[j])
+ & ("Notes" not in data[j])
+ ):
+ data[j] = data[j].replace("### ", "### *method* ")
+
+ # Remove notes headings
+ if "## Notes" in data[j]:
+ skip_ind.append(j)
+
+ # Indent arguments
+ if (":" in data[j]) & (data[j][0] != ">"):
+ data[j] = "> " + data[j]
+ if data[j][0] == ">":
+ data[j] = data[j][:-1] + "<br>" + data[j][-1]
+
+ with open(markdown_mods[i], "w") as f:
+ for j in range(len(data)):
+ if len(skip_ind) > 0:
+ if j in skip_ind:
+ continue
+ f.write(data[j])
diff --git a/docs/grid.md b/docs/grid.md
new file mode 100644
index 00000000..f76a2d0b
--- /dev/null
+++ b/docs/grid.md
@@ -0,0 +1,216 @@
+# grid package
+
+## Submodules
+
+# grid.hgr module
+
+> Created on Mon Feb 03 18:01:00 2025.
+
+@author James Harle
+@author Benjamin Barton
+@author Ryan Patmore
+
+## *class* grid.hgr.H_Grid(hgr_file, name_map_file, logger, dst=1)
+
+> Bases: `object`
+
+### *method* \_\_init\_\_(hgr_file, name_map_file, logger, dst=1)
+
+Master horizontal class.
+
+> ### Parameters
+
+> hgr_file (str) : string of file for loading hgr data
+> name_map_file (str) : string of file for mapping variable names
+> logger (object) : log error and messages
+> dst (bool) : flag for destination (true) or source (false)
+
+> ### Returns
+
+> H_grid (object) : horizontal grid object
+
+### *method* find_hgrid_type()
+
+Find out what type of horizontal grid is provided: A, B or C.
+
+### *method* get_vars(vars_want)
+
+Get the glam, gphi and e scale factors from file if possible.
+
+> ### Parameters
+
+> vars_want (list) : variables needed from file.
+
+> ### Returns
+
+> None : var_list is populated
+
+## grid.hgr.calc_e1_e2(glam, gphi, ij)
+
+Calculate missing scale factor e1 and e2 from glam or gphi.
+
+> ### Parameters
+
+> glam (np.array) : mesh variable glam (lon) [time, j, i]
+> gphi (np.array) : mesh variable gphi (lat) [time, j, i]
+> ij (int) : ij direction 1 (i or x direction) or 2 (j or y direction)
+
+> ### Returns
+
+> e (np.array) : horizontal distance scale factor e
+
+## grid.hgr.calc_grid_from_t(t_mesh, mesh)
+
+Calculate missing glam, gphi or gdep from t-grid.
+
+> ### Parameters
+
+> t_mesh (np.array) : mesh variable glam or gphi on t-grid
+> mesh (str) : grid mesh type (glam, gphi, or gdep of u, v, f)
+
+> ### Returns
+
+> mesh_out (dict) : horizontal grid mesh data variable
+
+## grid.hgr.fill_hgrid_vars(grid_type, grid, missing)
+
+Calculate the missing horizontal grid variables and add them to grid.
+
+> ### Parameters
+
+> grid_type (str) : type of horizontal grid (A, B or C)
+> grid (dict) : dictionary of grid data variable
+> missing (list) : list of missing variables to calculate
+
+> ### Returns
+
+> grid (dict) : horizontal grid data dictionary
+
+# grid.zgr module
+
+> Created on Mon Feb 03 18:01:00 2025.
+
+@author James Harle
+@author Benjamin Barton
+@author Ryan Patmore
+@author Anthony Wise
+
+## *class* grid.zgr.Z_Grid(zgr_file, name_map_file, hgr_type, e_dict, logger, dst=1)
+
+> Bases: `object`
+
+### *method* \_\_init\_\_(zgr_file, name_map_file, hgr_type, e_dict, logger, dst=1)
+
+Master depth class.
+
+> ### Parameters
+
+> zgr_file (str) : string of file for loading zgr data
+> name_map_file (str) : string of file for mapping variable names
+> hgr_type (str) : horizontal grid type
+> e_dict (dict) : dictionary of e1 and e2 scale factors
+> logger (object) : log error and messages
+> dst (bool) : flag for destination (true) or source (false)
+
+> ### Returns
+
+> Depth (object) : Depth object
+
+### *method* find_zgrid_type()
+
+Find out what type of vertical grid is provided: zco, zps or sigma levels (sco).
+
+### *method* get_vars(vars_want)
+
+Get the gdep and e3 scale factors from file if possible.
+
+> ### Parameters
+
+> vars_want (list) : variables needed from file.
+
+> ### Returns
+
+> None : var_list is populated
+
+## grid.zgr.calc_gdepw(gdept)
+
+Calculate missing gdepw from gdept.
+
+> ### Parameters
+
+> gdept (np.array) : mesh variable gdept on t-grid
+
+> ### Returns
+
+> dep_out (np.array) : vertical grid mesh data variable
+
+## grid.zgr.fill_zgrid_vars(zgr_type, grid, hgr_type, e_dict, missing)
+
+Calculate the missing vertical grid variables and add them to grid.
+
+> ### Parameters
+
+> zgr_type (str) : type of vertical grid (zco, zps or sco)
+> grid (dict) : dictionary of grid data variable
+> hgr_type (str) : horizontal grid type
+> e_dict (dict) : dictionary of e1 and e2 scale factors
+> missing (list) : list of missing variables to calculate
+
+> ### Returns
+
+> grid (dict) : vertical grid data dictionary
+
+## grid.zgr.horiz_interp_e3_old(e_in, var_in, lev)
+
+Horizontally interpolate the vertical scale factors e3u, e3v, e3f.
+
+Use the horizontal scale factors to calculate interpolation factors.
+To interpolate to get e3u or e3v, input var_in as e3t data but for e3f this
+should be e3u.
+
+> ### Parameters
+
+> e_in (dict) : all horizontal scale factors e1 and e2 in dictionary
+> var_in (np.array) : e scale factor to interpolate from e3t (or e3u for f)
+> lev (str) : grid level type (e3 of u, v, f)
+
+> ### Returns
+
+> e3 (np.array) : vertical distance scale factor e3 of lev
+
+## grid.zgr.horiz_interp_lev(t, w, zgr_type, hgr_type)
+
+Horizontally interpolate the vertical scale factors e3 and gdep.
+
+For A-Grids, u, v and f values are set to t and w values.
+For C-Grids, zps or sco vertical coords are used to define u, v, and f.
+For B-Grids, u and v values are set to f values following zps or sco.
+
+> ### Parameters
+
+> t (np.array) : vertical scale factors e or dep on t points
+> w (np.array) : vertical scale factors e or dep on w points
+> zgr_type (str) : type of vertical grid (zco, zps or sco)
+> hgr_type (str) : horizontal grid type (A, B or C)
+
+> ### Returns
+
+> lev (dict) : vertical distance scale factor e or gdep
+
+## grid.zgr.vert_calc_e3(gdep_mid, gdep_top, lev)
+
+Calculate missing vertical scale factors e3 from gdep.
+
+> ### Parameters
+
+> gdep_mid (np.array) : mesh variable on t levels
+> gdep_top (np.array) : mesh variable on w levels
+> lev (str) : grid level type (e3 of t, w, u, v)
+
+> ### Returns
+
+> e3 (np.array) : vertical distance scale factor e3 of lev
+
+## Module contents
+
+A Python based regional NEMO model configuration toolbox.
diff --git a/docs/index.md b/docs/index.md
index 2fc805e9..455ec11a 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -1,16 +1,799 @@
-# Welcome to pybdy's documentation!
+# pyBDY Documentation
-a Python based regional NEMO model configuration toolbox.
+**Welcome to the documentation for pyBDY (NEMO lateral boundary conditions)**
-```{toctree}
-:caption: 'Contents:'
-:maxdepth: 2
+
-API Reference <_api/pybdy/index>
+## Introduction
+
+pyBDY is a Python package to generate lateral boundary conditions for regional NEMO model configurations.
+It has been developed to use geographical and depth information from source data (e.g. a global ocean
+simulation) and translate them to a destination NEMO regional simulation. It makes use of a kd-tree approximate
+nearest-neighbour algorithm in order to provide a generic method of weighted-average interpolation for any
+flavour of ocean model. The available options are accessed through a NEMO style namelist.
+
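+For illustration, here is a minimal sketch of the kind of kd-tree weighted-average interpolation described above (toy data, using scipy; this is not pyBDY's actual implementation):
+
+```python
+import numpy as np
+from scipy.spatial import cKDTree
+
+# Toy source grid (lon, lat) and a field defined on it.
+src_lon, src_lat = np.meshgrid(np.linspace(0, 10, 50), np.linspace(40, 50, 50))
+src_field = np.sin(src_lon) + src_lat
+
+# Boundary points we want interpolated values at.
+bdy_lon = np.array([2.3, 5.1, 7.8])
+bdy_lat = np.array([41.0, 44.5, 49.2])
+
+# Build a kd-tree on the source points and query the 4 nearest neighbours.
+tree = cKDTree(np.column_stack([src_lon.ravel(), src_lat.ravel()]))
+dist, idx = tree.query(np.column_stack([bdy_lon, bdy_lat]), k=4)
+
+# Inverse-distance weighted average of the neighbouring source values.
+w = 1.0 / np.maximum(dist, 1e-12)
+w /= w.sum(axis=1, keepdims=True)
+print((src_field.ravel()[idx] * w).sum(axis=1))
+```
+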
+---
+
+## Contents
+
+- [How to cite :bookmark:](#how-to-cite-bookmark)
+- [Change Log :twisted_rightwards_arrows:](#change-log-twisted_rightwards_arrows)
+- [Dependencies :globe_with_meridians:](#dependencies-globe_with_meridians)
+- [Quick Start Installation :rocket:](#quick-start-installation-rocket)
+- [How to use pyBDY :student:](#how-to-use-pybdy-student)
+- [Worked Example :mechanical_arm:](#worked-example-mechanical_arm)
+- [Tidal Boundary Conditions Generation :sailboat:](#tidal-boundary-conditions-generation-sailboat)
+- [Troubleshooting :safety_vest:](#troubleshooting-safety_vest)
+- [pyBDY Module Structure :scroll:](#pybdy-module-structure-scroll)
+
+
+
+## How to cite :bookmark:
+
+[Back to top](#pybdy-documentation)
+
+Please cite pyBDY version 0.4.0 in your work using:
+
+Harle, J., Barton, B.I., Nagella, S., Crompton, S., Polton, J., Patmore, R., Morado, J., Prime, T., Wise, A., De Dominicis, M., Blaker, A., Farey, J.K. (2025). pyBDY - NEMO lateral boundary conditions v0.4.0 [Software]. [https://doi.org](<>)
+
+
+
+## Change Log :twisted_rightwards_arrows:
+
+[Back to top](#pybdy-documentation)
+
+The latest version of pyBDY is version 0.4.0.
+The changes relative to the previous version (0.3.0) are:
+
+- Sigma to sigma vertical layer interpolation is now possible.
+- Vertical interpolation in pyBDY can now be turned off for zco vertical coordinate data.
+- The namelist has been streamlined to remove variables that are no longer used.
+- Time input in the namelist has changed to offer more granularity.
+- Grid variable names are now specified using a .json file instead of .ncml. Source data is still specified with .ncml.
+- The boundary is split into chunks to allow for processing smaller sections of data.
+- Boundaries that cross an east-west wrap in source data can be processed.
+- The 1-2-1 horizontal filter has been turned off.
+- The *seawater* dependency has been updated to *gsw*.
+- A plotting masking bug has been fixed.
+- There is now horizontal flood filling that will remove zeros from salinity and temperature near land.
+- Bug fix for boundaries that meet diagonally to produce a 90 degree corner.
+- Unit tests and full integration tests have been added.
+- Documentation has been updated and restructured.
+
+**There is a new library for generating NEMO initial conditions called pyIC.**
+pyIC can be found at: [https://github.com/NOC-MSM/pyIC](https://github.com/NOC-MSM/pyIC)
+
+
+
+## Dependencies :globe_with_meridians:
+
+[Back to top](#pybdy-documentation)
+
+pyBDY is installed under a conda/mamba environment to aid wider distribution and to facilitate development.
+The key dependencies are listed below:
+
+- python=3.9
+- netCDF4
+- scipy
+- numpy
+- xarray
+- matplotlib
+- cartopy
+- thredds_crawler
+- seawater
+- pyqt5
+- pyjnius
+- cftime
+- gsw
+
+A recent JAVA installation is also required.
+
+---
+
+
+
+## Quick Start Installation :rocket:
+
+[Back to top](#pybdy-documentation)
+
+To get started, check out and set up an instance of the pyBDY GitHub [repository](https://github.com/NOC-MSM/pyBDY):
+
+```sh
+export PYBDY_DIR=$PWD/pyBDY
+git clone git@github.com:NOC-MSM/pyBDY.git
+```
+
+**Helpful Tip:** it is not advised to check out the repository in your home directory.
+
+Creating a specific conda virtual environment is highly recommended ([click here for more about virtual
+environments](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html)).
+Load conda (e.g. through anaconda/miniforge) and create the environment through the provided `environment.yml` file.
+
+```sh
+cd $PYBDY_DIR
+conda env create -n pybdy -f environment.yml
+```
+
+Activate the new environment
+
+```sh
+conda activate pybdy
+```
+
+Install pyBDY
+
+```sh
+pip install -e .
+```
+
+Make sure the Java Runtime Environment is set:
+
+```sh
+export JAVA_HOME=path_to_jre
+```
+
+Generalised methods for defining paths are as follows:
+
+```
+export JAVA_HOME=$(readlink -f $(which java)) # UNIX
+export JAVA_HOME=$(/usr/libexec/java_home) # Mac
+```
+
+To check that pyBDY has been correctly installed in the virtual environment,
+enter the following command:
+
+```
+pybdy -v
+```
+
+If it has, you should see the help usage prompt:
+
+```
+usage: pybdy -g -s
```
-# Indices and tables
+If not, please see the troubleshooting section for common causes as
+to why the installation may fail.
+
+To deactivate the conda environment:
+
+```
+conda deactivate
+```
+
+
+
+## How to use pyBDY :student:
+
+[Back to top](#pybdy-documentation)
+
+In this documentation "bdy points" refer to the output boundary points generated by pyBDY.
+First, follow the installation instructions in [Quick Start Installation](#quick-start-installation-rocket).
+
+### Step 1: File Preparation
+
+Copy and paste the following files into your working directory:
+
+- `inputs/namelist_local.bdy`: specifies file paths and configuration options.
+
+- `inputs/grid_name_map.json`: defines variable names in horizontal and vertical grid files.
+
+- `inputs/src_data_local.ncml`: aggregates and remaps source data variables for pyBDY.
+
+### Step 2: Edit the Namelist `namelist_local.bdy`
+
+Descriptions of all required variables are in
+[`src/pybdy/variable.info`](https://github.com/NOC-MSM/pyBDY/blob/master/src/pybdy/variable.info).
+Here we will summarise the main variables that will need changing to get started.
+
+#### Key Namelist Parameters
+
+- `sn_src_hgr`
+- `sn_src_zgr`
+- `sn_dst_hgr`
+- `sn_dst_zgr`
+- `sn_src_msk`
+- `sn_bathy`
+- `sn_nme_map`
+- `sn_src_dir`
+- `sn_dst_dir`
+- `cn_mask_file`
+- `ln_zinterp`
+- `nn_rimwidth`
+
+##### File Paths
+
+Directory paths in bdy file can be relative or absolute.
+The application picks the relative path from the current working directory.
+
+- **`sn_src_hgr`**: Source horizontal grid file. Use `ncdump -h` or `ncview` to inspect variables. The variable names are mapped in `grid_name_map.json`; map extra variable names there to avoid recalculation. See [Step 4: Setting up the JSON file](#step-4-setting-up-the-json-file) for variable descriptions and requirements.
+
+- **`sn_src_zgr`**: Source vertical grid file. This may be the same file as `sn_src_hgr`. The variable names are mapped in `grid_name_map.json`; map extra variables like `gdepw`, `gdepu`, `e3w` there to avoid recalculation. **Note**: Time-varying depths are not used in pyBDY. See [Step 4: Setting up the JSON file](#step-4-setting-up-the-json-file) for variable descriptions and requirements.
+
+- **`sn_dst_hgr`, `sn_dst_zgr`**: Destination equivalents of the above.
+
+- **`sn_src_msk`**: Source mask file with variables:
+
+ - `tmask`, `umask`, `vmask`, `fmask`
+
+- **`sn_bathy`**: Destination bathymetry file with variable:
+
+ - `Bathymetry`
+
+ - Used to calculate boundary mask if `ln_mask_file` is unset.
+
+ - Can be computed from `e3w` and `bottom_level`:
+
+ ```python
+ import numpy as np
+
+ # e3w has dims [t, z, y, x] (t of size 1); bottom_level has dims [y, x]
+ gdepw = np.cumsum(e3w, axis=1)
+ grid = np.indices(bottom_level.shape)
+ bathy = gdepw[0, bottom_level, grid[0], grid[1]]
+ ```
+
+- **`sn_nme_map`**: Path to `grid_name_map.json`
+
+ - **Note**: `ncml` is no longer used for grid input. Use `grid_name_map.json` instead.
+ - See [Step 4: Setting up the JSON file](#step-4-setting-up-the-json-file) for variable descriptions and [`inputs/grid_name_map_readme.txt`](https://github.com/NOC-MSM/pyBDY/blob/master/inputs/grid_name_map_readme.txt).
+
+- **`sn_src_dir`**: Path to `src_data.ncml`
+
+ - This is an NcML (XML) file that points to source data (not grid) paths. It can also include THREDDS URLs (see `inputs/namelist_remote.bdy` for example).
+ - More detail on setting up the NcML file is in [Step 3: Setting up the NcML file](#step-3-setting-up-the-ncml-file).
+
+- **`sn_dst_dir`**: Output directory for PyBDY data
+
+- **`cn_mask_file`** *(optional)*: Used to define open boundaries.
+
+ - Values: `-1` (out-of-domain), `0` (land), `1` (water)
+ - If not provided, PyBDY uses bathymetry to infer boundaries
+
+##### Other Settings
+
+- **`ln_dyn2d`**: used to turn on barotropic velocities in boundary processing.
+
+- **`ln_dyn3d`**: used to turn on total velocities in boundary processing. **Note** these are total, not baroclinic, velocities, which is important when running NEMO. You may want both ln_dyn2d and ln_dyn3d if testing, but usually one of them is sufficient as long as it matches your NEMO namelist setup.
+
+- **`ln_tra`**: used to turn on the tracers temperature and salinity in boundary processing.
+
+- **`ln_ice`**: used to turn on ice boundary conditions so that `ice1`, `ice2` and `ice3` are processed.
+
+- **`ln_zinterp`**: if `false`, vertical interpolation is disabled and the source (parent) data **must** use zco vertical levels.
+
+ - Output will match source vertical levels.
+ - If source uses zps or sco, this will be automatically set to `true` during run-time.
+
+- **`nn_rimwidth`**: Number of interior boundary points to generate
+
+ - Typical value: `9`
+ - For tidal boundaries: `1`
+
+#### Time Settings
+
+- Ensure `time_counter` exists in source files
+- Files must be time-ascending
+- NetCDF time metadata must include:
+ - `calendar`: `"gregorian"`, `"noleap"`, or `"360_day"`
+ - `units`: `"seconds since YYYY-MM-DD hh:mm:ss"`
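+
+A quick way to check these attributes on a source file is a few lines of Python (a sketch using the `netCDF4` package from the dependency list; the file name is illustrative):
+
+```python
+from netCDF4 import Dataset
+
+# Inspect the time axis of a source data file.
+with Dataset("src_T.nc") as nc:
+    time = nc.variables["time_counter"]
+    print(time.units)     # e.g. "seconds since 1979-01-01 00:00:00"
+    print(time.calendar)  # e.g. "gregorian"
+```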
+
+##### Required Namelist Time Parameters
+
+- **`sn_date_start`**: Start date for output (format: `YYYY-MM-DD`)
+- **`sn_date_end`**: End date for output (format: `YYYY-MM-DD`)
+ - The start date and end date of output must fall within the source data time range.
+- **`sn_dst_calendar`**: Output calendar format
+- **`sn_date_origin`**: Time counter reference date for output (format: `YYYY-MM-DD`)
+- **`ln_time_interpolation`**: If `true`, interpolate to daily steps.
+ - If `false`, output uses source data calendar (monthly steps only)
+
+### Step 3: Setting up the NcML file
+
+- The NcML file has wrappers like:
+ - `<netcdf>` which specifies a single NetCDF file or dataset reference. The top level `<netcdf>` in the example below declares a virtual NetCDF dataset.
+ - `<aggregation>` virtually combines multiple NetCDF files into a single dataset. It has attributes `type` and `dimName`. The `type` of combination can be `joinExisting` or `union`. The `dimName` specifies the dimension along which to join files.
+ - `<scan>` is used inside the `<aggregation>` wrapper to find multiple NetCDF files in a directory. This is instead of, or in addition to, listing them manually with several `<netcdf>` wrappers. `<scan>` has attributes like `location`, which gives the path to search, `suffix`, which gives the file ending, and `regExp`, which can provide a search expression using Regular Expression (Regex) format. Regex is a special text string that can be used in the NcML file for describing a search pattern to match against some text. You may compare using Regex to filter which files to include in your datasets against using a wildcard (\*) to specify a file search pattern on your computer. More information on Regex patterns can be found here: [Regex](https://learn.microsoft.com/en-us/dotnet/standard/base-types/regular-expression-language-quick-reference).
+ - `<dimension>` is a wrapper that allows for renaming a dimension (e.g. time_counter).
+ - `<variable>` is a wrapper that allows for renaming or modifying a variable or variable attributes (e.g. units).
+ - `orgName` maps a variable or dimension from its original name to a new name. It must be placed inside a `<variable>` or `<dimension>` wrapper. Variable names can be remapped in a way that only affects the reading of the file without modifying the original NetCDF file. This can be useful if the source (parent) data does not have the variables named in the standard way pybdy expects. The renaming can be done using `name="v2"` and `orgName="v1"`, where "v1" is the original name and "v2" is the new name.
+- The dimensions that pybdy expects in the source data are:
+ - `time_counter` - this is the required time dimension name
+ - dimensions in variables must be ordered `time_counter`, `depth`, `y`, `x` if 4 dimensional or ordered `time_counter`, `y`, `x` if 3 dimensional.
+- The variables that pybdy expects in the source data are:
+ - `votemper` - the water temperature
+ - `vosaline` - the water salinity
+ - `sossheig` - the sea surface height
+ - `vozocrtx` - the u (eastward) component of velocity
+ - `vomecrty` - the v (northward) component of velocity
+ - `ice1` - a sea ice parameter
+ - `ice2` - a sea ice parameter
+ - `ice3` - a sea ice parameter
+- See `inputs` folder for more examples.
+
+Example structure combining data on the T grid, U grid and V grid, each joined along the time dimension, then aggregating them together into a single virtual file (the data locations and file patterns shown are illustrative):
+
+```xml
+<netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
+  <aggregation type="union">
+    <netcdf>
+      <aggregation type="joinExisting" dimName="time_counter">
+        <scan location="./data/" regExp=".*T\.nc$" />
+      </aggregation>
+    </netcdf>
+    <netcdf>
+      <aggregation type="joinExisting" dimName="time_counter">
+        <scan location="./data/" regExp=".*U\.nc$" />
+      </aggregation>
+    </netcdf>
+    <netcdf>
+      <aggregation type="joinExisting" dimName="time_counter">
+        <scan location="./data/" regExp=".*V\.nc$" />
+      </aggregation>
+    </netcdf>
+  </aggregation>
+</netcdf>
+```
+
+An example NcML expression renaming a variable in joined NetCDF files (the location and names are illustrative):
+
+```xml
+<netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
+  <aggregation type="joinExisting" dimName="time_counter">
+    <scan location="./data/" suffix=".nc" />
+  </aggregation>
+  <variable name="votemper" orgName="temp" />
+</netcdf>
+```
+
+### Step 4: Setting up the JSON file
+
+The "grid_name_map.json" file provides a way to rename/remap the variables from
+the names in your netcdf file to the variable names expected by pybdy. This is
+specifically for the horizontal (hgr) and vertical (zgr) grid files (not data Input/Output).
+In the past this could be done with a .ncml file, but it is now done using .json.
+
+The "grid_name_map.json" file has "dimension_map", "sc_variable_map" and
+"dst_variable_map", these should not be edited. "sc" refers to the source grid
+and "dst" refers to the destination grid. The list of dimensions t, z, y, x
+under "dimension_map" are "key: value" pairs, where the "key" should be unedited
+and the "value" should be changed to match the name of the respective dimension
+in your netcdf file. The is the same process for the list of variables under
+"variable_map". The "variable_map" is used for both horizontal and vertical
+grid variable names even if they come from separate files.
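+
+For example, if the depth dimension in your file is called `deptht` and the t-grid longitude variable is called `longitude` (both hypothetical names), you would change only the value side of each pair, e.g.:
+
+```json
+{
+  "dimension_map": {
+    "z": "deptht"
+  },
+  "sc_variable_map": {
+    "glamt": "longitude"
+  }
+}
+```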
+
+Below is a description of each dimension and variable. Not all variables are needed:
+those marked with a * below are optional; if you don't have an optional
+variable in your netcdf file, leave it as the default "value" and pybdy will do its
+best to calculate it. If the variable is available, it should be name mapped,
+otherwise pybdy may, for example, incorrectly interpret the grid type. Variables
+marked \*\* may be optional depending on what other variables are provided.
+In all cases "t" should be size 1. Pybdy does not deal with time-varying grids.
+
+Summary of ideal requirements:
+
+- 2D grid (or 3D with time dimension = 1) of `glamt`, `gphit`, `glamu`, `e1t`, `e2t`, `e1u`, etc.
+- 3D grid (or 4D with time dimension = 1) of `gdept`, `gdepw`, `e3t`, `e3w`, and 2D grid of `mbathy` (aka `bottom_level`)
+
+Summary of minimum requirements:
+
+- for the horizontal grid variables we need at least `nav_lat`, `nav_lon` on a 2D grid.
+- for the vertical grid variables we have several possible variations, ordered by preference:
+ 1. `gdept` or `e3t` are specified on 3D grids (**Note** `e3t` is sometimes called `e3t_0`, i.e. non-time varying, but has dimensions t, z, y, x; in this case specify `"e3t": "e3t_0"`).
+ 2. 1D depth (or 2D with dimensions time and z) `gdept_0` is specified in addition to `mbathy`.
+ 3. If `mbathy` is missing in the source grid, use `gdept_0` (1D depth) and specify any 2D field (e.g., `"mbathy": "nav_lon"`) for `mbathy`. **Not recommended for destination (sn_dst_zgr)**.
+ 4. `deptht_bounds` is not the same as `gdept`. If it is the only option, you need to use it to calculate `gdept` yourself.
+
+```
+"dimension_map"
+
+"t" = time dimension (size 1)
+"z" = depth dimension
+"y" = horizontal dimension often aligned with latitude
+"x" = horizontal dimension often aligned with longitude
+
+"sc_variable_map" and "dst_variable_map" which refer to sc (source grid variables in "sn_src_hgr",
+"sn_src_zgr") and dst (destination grid variables in "sn_dst_hgr", "sn_dst_zgr")
+
+"nav_lon" = ** Longitude on t-grid (dims [y, x])
+ (only needed if glamt is not present in the file)
+"nav_lat" = ** Latitude on t-grid (dims [y, x])
+ (only needed if gphit is not present in the file)
+"glamt" = Longitude on t-grid (dims [t, y, x])
+"gphit" = Latitude on t-grid (dims [t, y, x])
+"glamf" = * Longitude on f-grid (dims [t, y, x])
+"gphif" = * Latitude on f-grid (dims [t, y, x])
+"glamu" = * Longitude on u-grid (dims [t, y, x])
+"gphiu" = * Latitude on u-grid (dims [t, y, x])
+"glamv" = * Longitude on v-grid (dims [t, y, x])
+"gphiv" = * Latitude on v-grid (dims [t, y, x])
+"e1t" = * scale factor distance between grid cell in x direction on t-grid (dims [t, y, x])
+"e2t" = * scale factor distance between grid cell in y direction on t-grid (dims [t, y, x])
+"e1f" = * scale factor distance between grid cell in x direction on f-grid (dims [t, y, x])
+"e2f" = * scale factor distance between grid cell in y direction on f-grid (dims [t, y, x])
+"e1u" = * scale factor distance between grid cell in x direction on u-grid (dims [t, y, x])
+"e2u" = * scale factor distance between grid cell in y direction on u-grid (dims [t, y, x])
+"e1v" = * scale factor distance between grid cell in x direction on v-grid (dims [t, y, x])
+"e2v" = * scale factor distance between grid cell in y direction on v-grid (dims [t, y, x])
+
+"mbathy" = ** index of the ocean bottom level (may be called bottom_level) (dims [t, y, x])
+ (only needed if gdept or e3t not given i.e. gdept_0 given. If gdept_0 is the
+ only option and no mbathy is available offer any variable with dims [t, y, x]
+ or dims [y, x])
+"gdept_0" = ** 1D depth of levels on t-grid and t-levels (dims [t, z])
+ (only needed if gdept or e3t not given)
+"gdept" = ** 3D depth of levels on t-grid and t-levels (dims [t, z, y, x])
+ (only needed if gdept_0 or e3t not given)
+"gdepu" = * 3D depth of levels on u-grid and t-levels (dims [t, z, y, x])
+"gdepv" = * 3D depth of levels on v-grid and t-levels (dims [t, z, y, x])
+"gdepf" = * 3D depth of levels on f-grid and t-levels (dims [t, z, y, x])
+"gdepw" = * 3D depth of levels on t-grid and w-levels (dims [t, z, y, x])
+"gdepuw" = * 3D depth of levels on u-grid and w-levels (dims [t, z, y, x])
+"gdepvw" = * 3D depth of levels on v-grid and w-levels (dims [t, z, y, x])
+"e3t" = ** vertical scale factor distance between t-levels on t-grid (dims [t, z, y, x])
+ (only needed if gdept or gdept_0 not given)
+"e3w" = * vertical scale factor distance between w-levels on t-grid (dims [t, z, y, x])
+"e3u" = * vertical scale factor distance between t-levels on u-grid (dims [t, z, y, x])
+"e3v" = * vertical scale factor distance between t-levels on v-grid (dims [t, z, y, x])
+"e3f" = * vertical scale factor distance between t-levels on f-grid (dims [t, z, y, x])
+"e3uw" = * vertical scale factor distance between w-levels on u-grid (dims [t, z, y, x])
+"e3vw" = * vertical scale factor distance between w-levels on v-grid (dims [t, z, y, x])
+"e3fw" = * vertical scale factor distance between w-levels on f-grid (dims [t, z, y, x])
+```
+
+### Step 5: Running pyBDY
+
+To use pyBDY, enter the following command (this example will run a benchmarking test):
+
+```
+pybdy -s /path/to/namelist/file (e.g. ./inputs/namelist_remote.bdy)
+```
+
+This command line tool reads a BDY file, extracts boundary data and prepares the data for a NEMO simulation.
+
+
+
+## Worked Example :mechanical_arm:
+
+[Back to top](#pybdy-documentation)
+
+Here we show a worked example of how to set up the namelist for a different domain than the examples found in the *inputs* folder.
+The example child (destination) here is a regional NEMO model that covers the Indian Ocean and the parent (source) used here is a global NEMO model.
+
+### Namelist File
+
+Below are excerpts from an example *namelist.bdy*.
+
+Here the file paths are set. These can be absolute (i.e. starting with "/") or relative (i.e. starting with "./"). For help with what variables are needed in these files see [How to use pyBDY :student:](#how-to-use-pybdy-student). In the example case, the bathymetry file needed to be calculated before running pybdy. It may also be the case that you need to calculate variables like gdept for the sn_src_zgr file before running pybdy. For setting up the grid_name_map.json see the JSON file example section below.
+
+```
+!------------------------------------------------------------------------------
+! grid information
+!------------------------------------------------------------------------------
+ sn_src_hgr = '/scratch/India_Test/mesh_mask_ORCA025_light.nc4'
+ sn_src_zgr = '/scratch/India_Test/20241211_restart.nc'
+ sn_dst_hgr = '/scratch/India_Test/domain_cfg.nc' ! Expects vars found in domain_cfg.nc
+ sn_dst_zgr = '/scratch/India_Test/domain_cfg.nc' ! Expects vars: {e3u,e3v,e3w,e3t,nav_lat,nav_lon,mbathy}
+ sn_src_msk = '/scratch/India_Test/mask_3D.nc'
+ sn_bathy = '/scratch/India_Test/domain_cfg_bathy.nc' ! dst bathymetry w/o time dimension
+ !Expects vars: {Bathymetry,nav_lat,nav_lon}
+ sn_nme_map = './india_test/grid_name_map.json' ! json file mapping variable names to netcdf vars
+```
+
+Here the source (parent) data is specified via the .ncml file in NcML format. For setting up src_data_local.ncml see the NcML file example section below. The output directory, file name prefix and `_FillValue` for the netCDF output are specified. The sn_dst_metainfo is set in the netcdf output file `history` attribute. (`nn_src_time_adj` appears to be unused.)
+
+```
+!------------------------------------------------------------------------------
+! I/O
+!------------------------------------------------------------------------------
+ sn_src_dir = './india_test/src_data_local.ncml' ! src_files/'
+ sn_dst_dir = '/scratch/benbar/India_Test/'
+ sn_fn = 'india' ! prefix for output files
+ nn_fv = -1e20 ! set fill value for output files
+ nn_src_time_adj = 0 ! src time adjustment
+ sn_dst_metainfo = 'India Data' ! history info
+```
+
+Here some options are set. cn_coords_file is a file that can be output by pybdy.
+In this case, the child (destination) data does not have a pre-defined mask file, so pybdy will use the bathymetry provided in sn_bathy to calculate the mask. If the mask produced is not giving the correct boundaries, you may need to provide a mask.nc file which you generate yourself. This file contains a 2D mask the same shape as the bathymetry, where 1 = "water", 0 = "land" and -1 = "out of domain". Boundary points will be generated between water and "out of domain", which can also be where water meets the edge of the defined 2D area. A sketch for generating such a file is given after the namelist excerpt below.
+Here, ln_dyn2d will provide a sea surface height (`sossheig`) variable in the output for barotropic velocities.
+ln_dyn3d would define total velocities in the output if set to true. Here, ln_dyn3d will not include the barotropic component in the 3D velocities, but we do not need it because we have ln_dyn2d=true. At least one of ln_dyn2d or ln_dyn3d should be selected, and the choice should match the options in NEMO.
+Here, ln_tra shows temperature and salinity will be output. ln_ice shows ice will not be output. ln_zinterp shows the vertical interpolation is calculated by pybdy (so it should be turned off in NEMO).
+Here, nn_rimwidth is set to 9 to provide 9 layers of boundary points along all boundaries.
+
+```
+!------------------------------------------------------------------------------
+! unstructured open boundaries
+!------------------------------------------------------------------------------
+ ln_coords_file = .true. ! =T : produce bdy coordinates files
+ cn_coords_file = 'coordinates.bdy.nc' ! name of bdy coordinates files
+ ! (if ln_coords_file=.TRUE.)
+ ln_mask_file = .false. ! =T : read mask from file
+ cn_mask_file = 'mask.nc' ! name of mask file
+ ! (if ln_mask_file=.TRUE.)
+ ln_dyn2d = .true. ! boundary conditions for
+ ! barotropic fields
+ ln_dyn3d = .false. ! boundary conditions for
+ ! baroclinic velocities
+ ln_tra = .true. ! boundary conditions for T and S
+ ln_ice = .false. ! ice boundary condition
+ ln_zinterp = .true. ! vertical interpolation
+ nn_rimwidth = 9 ! width of the relaxation zone
+```
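+
+If you do need to generate your own `mask.nc` (as described above), here is a minimal, hedged sketch; the output variable name `mask` and the land/sea test are assumptions you should adapt to your domain:
+
+```python
+import numpy as np
+from netCDF4 import Dataset
+
+# Build a 2D mask from the destination bathymetry:
+# 1 = water, 0 = land, -1 = out of domain.
+with Dataset("domain_cfg_bathy.nc") as nc:
+    bathy = nc.variables["Bathymetry"][:].squeeze()
+
+mask = np.where(bathy > 0, 1, 0).astype("i4")
+mask[:, :50] = -1  # illustrative: mark a strip as out of domain
+
+with Dataset("mask.nc", "w") as nc:
+    nc.createDimension("y", mask.shape[0])
+    nc.createDimension("x", mask.shape[1])
+    nc.createVariable("mask", "i4", ("y", "x"))[:] = mask
+```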
+
+In this example we are not producing the tidal forcing on the boundary because ln_tide is set to false. This means the rest of this section does not matter. See [Tidal Boundary Conditions Generation :sailboat:](#tidal-boundary-conditions-generation-sailboat) for more on setting up tidal boundaries.
+
+```
+!------------------------------------------------------------------------------
+! unstructured open boundaries tidal parameters
+!------------------------------------------------------------------------------
+ ln_tide = .false. ! =T : produce bdy tidal conditions
+ sn_tide_model = 'FES2014' ! Name of tidal model. Accepts FES2014, TPXO7p2, or TPXO9v5
+ clname(1) = 'M2' ! constituent name
+ clname(2) = 'S2'
+ clname(3) = 'K2'
+ clname(4) = 'O1'
+ clname(5) = 'P1'
+ clname(6) = 'Q1'
+ clname(7) = 'M4'
+ ln_trans = .true. ! interpolate transport rather than
+ ! velocities
+ ! location of TPXO7.2 data
+ sn_tide_grid_7p2 = './inputs/tpxo7.2/grid_tpxo7.2.nc'
+ sn_tide_h = './inputs/tpxo7.2/h_tpxo7.2.nc'
+ sn_tide_u = './inputs/tpxo7.2/u_tpxo7.2.nc'
+ ! location of TPXO9v5 data: single constituents per file
+ sn_tide_grid_9p5 = './inputs/TPXO9_atlas_v5_nc/grid_tpxo9_atlas_30_v5.nc'
+ sn_tide_dir = './inputs/TPXO9_atlas_v5_nc/'
+ ! location of FES2014 data
+ sn_tide_fes = './inputs/FES2014/'
+```
+
+The output requested here covers 3 days, starting on 12 Dec 2024 (which is also used as the reference date).
+
+```
+!------------------------------------------------------------------------------
+! Time information for output
+!------------------------------------------------------------------------------
+ sn_date_start = '2024-12-12' ! dst output date start YYYY-MM-DD
+ sn_date_end = '2024-12-15' ! dst output date end YYYY-MM-DD
+ sn_dst_calendar = 'gregorian' ! output calendar format
+ sn_date_origin = '2024-12-12' ! reference for time counter YYYY-MM-DD
+ ln_time_interpolation = .true. ! set to false to use parent
+ ! calendar for monthly frequency only
+```
+
+These parameters can be left unchanged. We do not recommend changing them.
+
+```
+!------------------------------------------------------------------------------
+! Additional parameters
+!------------------------------------------------------------------------------
+ nn_wei = 1 ! smoothing filter weights
+ rn_r0 = 0.041666666 ! decorrelation distance use in gauss
+ ! smoothing onto dst points. Need to
+ ! make this a funct. of dlon
+ ln_nemo3p4 = .true. ! else presume v3.2 or v3.3
+ nn_alpha = 0 ! Euler rotation angle
+ nn_beta = 0 ! Euler rotation angle
+ nn_gamma = 0 ! Euler rotation angle
+ rn_mask_max_depth = 100.0 ! Maximum depth to be ignored for the mask
+ rn_mask_shelfbreak_dist = 20000.0 ! Distance from the shelf break
+```
+
+### JSON File
+
+This is an example .json file: *grid_name_map.json*. It specifies the names of variables in the source (parent) and destination (child) netCDF grid files. The grid files need to be checked with "ncdump -h" and the variable names matched appropriately. It is expected that some of these variables will not be in your grid files. That is not a problem as long as you map the variables that meet the minimum requirements: see [How to use pyBDY :student:](#how-to-use-pybdy-student) for the minimum requirements.
+
+```json
+{
+ "dimension_map": {
+ "t": "t",
+ "z": "z",
+ "y": "y",
+ "x": "x"
+ },
+ "sc_variable_map": {
+ "nav_lon": "nav_lon",
+ "nav_lat": "nav_lat",
+ "glamt": "glamt",
+ "gphit": "gphit",
+ "glamf": "glamf",
+ "gphif": "gphif",
+ "glamu": "glamu",
+ "gphiu": "gphiu",
+ "glamv": "glamv",
+ "gphiv": "gphiv",
+ "e1t": "e1t",
+ "e2t": "e2t",
+ "e1f": "e1f",
+ "e2f": "e2f",
+ "e1u": "e1u",
+ "e2u": "e2u",
+ "e1v": "e1v",
+ "e2v": "e2v",
+ "mbathy": "nav_lon",
+ "gdept_0": "nav_lev",
+ "gdept": "gdept",
+ "gdepu": "gdepu",
+ "gdepv": "gdepv",
+ "gdepf": "gdepf",
+ "gdepw": "gdepw",
+ "gdepuw": "gdepuw",
+ "gdepvw": "gdepvw",
+ "e3t": "e3t",
+ "e3w": "e3w",
+ "e3u": "e3u",
+ "e3v": "e3v",
+ "e3f": "e3f",
+ "e3uw": "e3uw",
+ "e3vw": "e3vw",
+ "e3fw": "e3fw"
+ },
+ "dst_variable_map": {
+ "nav_lon": "nav_lon",
+ "nav_lat": "nav_lat",
+ "glamt": "glamt",
+ "gphit": "gphit",
+ "glamf": "glamf",
+ "gphif": "gphif",
+ "glamu": "glamu",
+ "gphiu": "gphiu",
+ "glamv": "glamv",
+ "gphiv": "gphiv",
+ "e1t": "e1t",
+ "e2t": "e2t",
+ "e1f": "e1f",
+ "e2f": "e2f",
+ "e1u": "e1u",
+ "e2u": "e2u",
+ "e1v": "e1v",
+ "e2v": "e2v",
+ "mbathy": "bottom_level",
+ "gdept_0": "gdept_0",
+ "gdept": "gdept",
+ "gdepu": "gdepu",
+ "gdepv": "gdepv",
+ "gdepf": "gdepf",
+ "gdepw": "gdepw",
+ "gdepuw": "gdepuw",
+ "gdepvw": "gdepvw",
+ "e3t": "e3t_0",
+ "e3w": "e3w_0",
+ "e3u": "e3u_0",
+ "e3v": "e3v_0",
+ "e3f": "e3f_0",
+ "e3uw": "e3uw_0",
+ "e3vw": "e3vw_0",
+ "e3fw": "e3fw"
+ }
+}
+```
+
+### NcML File
+
+This is an example NcML file which is used to provide file paths for the parent (source) data that pybdy will read in.
+The example file's name is *src_data_local.ncml*. Here the NcML file combines data on the T grid, U grid and V grid, each joined along the time dimension, then aggregates them together into a single virtual file (the data locations shown are illustrative):
+
+```xml
+<netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
+  <aggregation type="union">
+    <netcdf><aggregation type="joinExisting" dimName="time_counter"><scan location="./india_test/" regExp=".*T\.nc$" /></aggregation></netcdf>
+    <netcdf><aggregation type="joinExisting" dimName="time_counter"><scan location="./india_test/" regExp=".*U\.nc$" /></aggregation></netcdf>
+    <netcdf><aggregation type="joinExisting" dimName="time_counter"><scan location="./india_test/" regExp=".*V\.nc$" /></aggregation></netcdf>
+  </aggregation>
+</netcdf>
+```
+
+
+
+## Tidal Boundary Conditions Generation :sailboat:
+
+[Back to top](#pybdy-documentation)
+
+By providing a global tidal model dataset (TPXO and FES are currently supported) pyBDY can generate boundary conditions for the NEMO configuration supplied using the namelist file.
+
+### Namelist options
+
+To use this feature, the namelist needs to be configured with the required options. These are listed below:
+
+```
+ln_tide = .true. ! =T : produce bdy tidal conditions
+sn_tide_model = 'FES2014' ! Name of tidal model. Accepts FES2014, TPXO7p2, or TPXO9v5
+clname(1) = 'M2' ! constituent name
+clname(2) = 'S2'
+clname(3) = 'K2'
+clname(4) = 'O1'
+clname(5) = 'P1'
+clname(6) = 'Q1'
+clname(7) = 'M4'
+ln_trans = .true. ! interpolate transport rather than velocities
+! location of TPXO7.2 data
+sn_tide_grid_7p2 = './inputs/tpxo7.2/grid_tpxo7.2.nc'
+sn_tide_h = './inputs/tpxo7.2/h_tpxo7.2.nc'
+sn_tide_u = './inputs/tpxo7.2/u_tpxo7.2.nc'
+! location of TPXO9v5 data: single constituents per file
+sn_tide_grid_9p5 = './inputs/TPXO9_atlas_v5_nc/grid_tpxo9_atlas_30_v5.nc'
+sn_tide_dir = './inputs/TPXO9_atlas_v5_nc/'
+! location of FES2014 data
+sn_tide_fes = './inputs/FES2014/'
+```
+
+These options define the location of the tidal model datasets. Note this differs depending on the model, as TPXO has all harmonic constants in one netcdf file whereas FES has three separate netcdf files (one for amplitude, two for currents) for each constant. Extra harmonics can be appended to the clname(n) list. FES supports 34 constants and TPXO7.2 has 13 to choose from. Other versions of TPXO should work with pyBDY but have not yet been tested. NOTE: FES dataset filenames must be in the format of constituent then type, e.g.:
+
+```
+M2_Z.nc (for amplitude)
+M2_U.nc (for U component of velocity)
+M2_V.nc (for V component of velocity)
+```
+
+If this is not done, pyBDY will not recognise the files. TPXO data files are specified directly, so these can have any name, although it is best to stick with the default names as shown above. So far the tidal model datasets have been downloaded and used locally, but they could also be stored on a THREDDS server, although this has not been tested with the global tide models.
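+
+If your FES download uses a different layout, here is a short hypothetical sketch that copies the files into the expected naming scheme (the source directory names are assumptions about how your download is organised):
+
+```python
+import shutil
+
+# Copy FES constituent files into the <constituent>_<type>.nc layout pyBDY expects.
+for c in ["M2", "S2", "K2", "O1", "P1", "Q1", "M4"]:
+    shutil.copy(f"ocean_tide/{c.lower()}.nc", f"FES2014/{c}_Z.nc")
+    shutil.copy(f"eastward_velocity/{c.lower()}.nc", f"FES2014/{c}_U.nc")
+    shutil.copy(f"northward_velocity/{c.lower()}.nc", f"FES2014/{c}_V.nc")
+```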
+
+Other options include "ln_tide", a boolean that when set to true will generate tidal boundaries; "sn_tide_model", a string that defines which model to use (FES2014, TPXO7p2 or TPXO9v5); and "ln_trans", a boolean that when set to true will interpolate transport rather than velocities.
+
+### Harmonic Output Checker
+
+There is a harmonic output checker that can be used to check the output of pyBDY against a reference tide model. So far the only supported reference model is FES, but TPXO will be added in the future. Any tidal output from pyBDY can be checked (e.g. FES and TPXO). While using the same model as both input and reference doesn't improve accuracy, it does confirm that the output is within acceptable/expected limits of the nearest model reference point.
+
+There are differences because pyBDY interpolates the harmonics and the tidal checker does not, so there can be some difference in the values, particularly close to coastlines.
+
+The checker can be enabled by editing the following in the relevant bdy file:
+
+```
+ln_tide_checker = .true. ! run tide checker on pyBDY tide output
+sn_ref_model = 'fes' ! which model to check output against (FES only)
+```
+
+The boolean determines whether to run the checker; this takes place after creating the interpolated harmonics and writing them to disk. The string denotes which tide model to use as reference; so far only FES is supported. The string denoting the model is not strictly needed; by default fes is used.
+
+The checker will output information regarding the checking to the NRCT log, and also write a spreadsheet to the output folder containing any exceedance values, the closest reference model value and their locations. Amplitude and phase are checked independently, so both have latitude and longitude associated with them. It is also useful to know the amplitude of an exceeded phase to see how much impact it will have, so this is also written to the spreadsheet. In a typical example output, the majority of the amplitudes, both for the amplitude exceedances and those associated with the phase exceedances, are low (~0.01 m), so can most likely be ignored. A few phase exceedances with higher amplitudes (~0.2 m) would potentially require further investigation. A common reason for such an exceedance is coastlines, with the relevant point being further away from an FES data point.
+
+The actual thresholds for both amplitude and phase are based on the amplitude of the output or reference, because different tolerances apply at different amplitudes. High amplitudes should have lower percentage differences to the FES reference than low ones, simply due to the absolute size of the amplitude itself: a 0.1 m difference is acceptable for a 1.0 m amplitude but not for a 0.01 m amplitude. The smaller amplitudes contribute less to the overall tide height, so larger percentage differences are acceptable. The same also applies to phases: at large amplitudes the phase has little room for differences, but at lower amplitudes this is less critical, so a higher threshold is tolerated.
+
+The following power functions are used to determine what threshold to apply based on the reference model amplitude.
+
+#### Amplitude Threshold
+
+```
+Percentage Exceedance = 26.933 * Reference Amplitude ^ -0.396
+```
+
+#### Phase Threshold
+
+```
+Phase Exceedance = 5.052 * pyBDY Amplitude ^ -0.60
+```
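+
+Expressed as code, the two thresholds are simple power laws (a sketch of the formulas above; pyBDY's internal implementation may differ):
+
+```python
+def amplitude_threshold(ref_amp):
+    """Allowed percentage difference from the FES reference, given its amplitude (m)."""
+    return 26.933 * ref_amp ** -0.396
+
+
+def phase_threshold(pybdy_amp):
+    """Allowed phase difference (degrees), given the pyBDY amplitude (m)."""
+    return 5.052 * pybdy_amp ** -0.60
+
+
+# Larger amplitudes tolerate smaller relative errors:
+print(amplitude_threshold(1.0))   # ~26.9 %
+print(amplitude_threshold(0.01))  # ~167 %
+```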
+
+
+
+## Troubleshooting :safety_vest:
+
+[Back to top](#pybdy-documentation)
+
+Always check the pyBDY log file. This is usually saved in the working directory of pyBDY as nrct.log. It gives helpful information which may help to diagnose issues, e.g. ValueErrors that are the result of a THREDDS server being down and unable to provide data files.
+
+If you get the error message "Destination touches source i-edge but source is not cylindrical" or "Destination touches source j-edge but North Fold is not implemented", there is a plot you can uncomment in pybdy.nemo_bdy_chunk.chunk_bdy() that will show you where pyBDY is attempting to place bdy points.
+
+- For "Destination touches source i-edge but source is not cylindrical", you may have an open boundary in your mask or bathymetry file that is not inside the domain of the source data. If this is the case you need to edit your mask to be land (i.e. zeros) to block the incorrect open boundary.
+- For "Destination touches source j-edge but North Fold is not implemented", your domain probably touches the Arctic North Fold and pyBDY is trying to put an open boundary there. If this is the case you need to edit your mask to be land (i.e. zeros) to block the incorrect open boundary along the north edge of the domain. Do not attept to have a regional model with a boundary crossing the North Fold, this has not be implemented yet.
+
+Check that your variable and dimension names match the requirements and are mapped correctly, either in the NcML file or the JSON file for the source data and grid data respectively (see the section [How to use pyBDY :student:](#how-to-use-pybdy-student)).
+
+If you have time interpolation problems read the section [Time Settings](#time-settings).
+
+
+
+## pyBDY Module Structure :scroll:
+
+[Back to top](#pybdy-documentation)
-- {ref}`genindex`
-- {ref}`modindex`
-- {ref}`search`
+All the classes, methods and functions in pyBDY can be found in the tab along the top of the page.
diff --git a/docs/make.bat b/docs/make.bat
deleted file mode 100644
index 32bb2452..00000000
--- a/docs/make.bat
+++ /dev/null
@@ -1,35 +0,0 @@
-@ECHO OFF
-
-pushd %~dp0
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set SOURCEDIR=.
-set BUILDDIR=_build
-
-%SPHINXBUILD% >NUL 2>NUL
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.https://www.sphinx-doc.org/
- exit /b 1
-)
-
-if "%1" == "" goto help
-
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-goto end
-
-:help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-
-:end
-popd
diff --git a/docs/make_docs.sh b/docs/make_docs.sh
new file mode 100755
index 00000000..fc5a2547
--- /dev/null
+++ b/docs/make_docs.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+# Script for building docstring docs
+pip install sphinx sphinx-markdown-builder sphinx-autodoc-typehints;
+
+# Generate a full Sphinx project and API stubs from the package sources
+sphinx-apidoc -o Sphinx-docs ../src --full -A 'Benjamin Barton'; cd Sphinx-docs;
+
+echo "import os
+import sys
+sys.path.insert(0, os.path.abspath('../..'))
+sys.path.insert(0, os.path.abspath('../../src'))
+
+def skip(app, what, name, obj, would_skip, options):
+    if name in ('__init__',):
+        return False
+    return would_skip
+
+def setup(app):
+    app.connect('autodoc-skip-member', skip)
+extensions.append('sphinx_autodoc_typehints')
+" >> conf.py;
+
+make markdown
+
+cd ..
+echo "All Module Structure
+" >> module_structure.md
+tail --lines=+13 ./Sphinx-docs/_build/markdown/index.md >> module_structure.md
+#cp index.md index_full.md
+#tail --lines=+13 ./Sphinx-docs/_build/markdown/index.md >> index_full.md
+#echo "
+#[Back to top](#pybdy-documentation)" >> index_full.md
+
+rm ./Sphinx-docs/_build/markdown/index.md
+mv ./Sphinx-docs/_build/markdown/*.md .
+python format_docs.py
+
+rm -rf Sphinx-docs
diff --git a/docs/module_structure.md b/docs/module_structure.md
new file mode 100644
index 00000000..da55046c
--- /dev/null
+++ b/docs/module_structure.md
@@ -0,0 +1,147 @@
+All Module Structure
+
+- [grid package](grid.md)
+ - [Submodules](grid.md#submodules)
+ - [grid.hgr module](grid.md#module-grid.hgr)
+ - [`H_Grid`](grid.md#grid.hgr.H_Grid)
+ - [`H_Grid.__init__()`](grid.md#grid.hgr.H_Grid.__init__)
+ - [`H_Grid.find_hgrid_type()`](grid.md#grid.hgr.H_Grid.find_hgrid_type)
+ - [`H_Grid.get_vars()`](grid.md#grid.hgr.H_Grid.get_vars)
+ - [`calc_e1_e2()`](grid.md#grid.hgr.calc_e1_e2)
+ - [`calc_grid_from_t()`](grid.md#grid.hgr.calc_grid_from_t)
+ - [`fill_hgrid_vars()`](grid.md#grid.hgr.fill_hgrid_vars)
+ - [grid.zgr module](grid.md#module-grid.zgr)
+ - [`Z_Grid`](grid.md#grid.zgr.Z_Grid)
+ - [`Z_Grid.__init__()`](grid.md#grid.zgr.Z_Grid.__init__)
+ - [`Z_Grid.find_zgrid_type()`](grid.md#grid.zgr.Z_Grid.find_zgrid_type)
+ - [`Z_Grid.get_vars()`](grid.md#grid.zgr.Z_Grid.get_vars)
+ - [`calc_gdepw()`](grid.md#grid.zgr.calc_gdepw)
+ - [`fill_zgrid_vars()`](grid.md#grid.zgr.fill_zgrid_vars)
+ - [`horiz_interp_e3_old()`](grid.md#grid.zgr.horiz_interp_e3_old)
+ - [`horiz_interp_lev()`](grid.md#grid.zgr.horiz_interp_lev)
+ - [`vert_calc_e3()`](grid.md#grid.zgr.vert_calc_e3)
+ - [Module contents](grid.md#module-grid)
+- [pybdy package](pybdy.md)
+ - [Subpackages](pybdy.md#subpackages)
+ - [pybdy.gui package](pybdy.gui.md)
+ - [Submodules](pybdy.gui.md#submodules)
+ - [pybdy.gui.mynormalize module](pybdy.gui.md#module-pybdy.gui.mynormalize)
+ - [pybdy.gui.nemo_bdy_input_window module](pybdy.gui.md#module-pybdy.gui.nemo_bdy_input_window)
+ - [pybdy.gui.nemo_bdy_mask module](pybdy.gui.md#module-pybdy.gui.nemo_bdy_mask)
+ - [pybdy.gui.nemo_bdy_mask_gui module](pybdy.gui.md#module-pybdy.gui.nemo_bdy_mask_gui)
+ - [pybdy.gui.nemo_bdy_namelist_edit module](pybdy.gui.md#module-pybdy.gui.nemo_bdy_namelist_edit)
+ - [pybdy.gui.nemo_ncml_generator module](pybdy.gui.md#module-pybdy.gui.nemo_ncml_generator)
+ - [pybdy.gui.nemo_ncml_tab_widget module](pybdy.gui.md#module-pybdy.gui.nemo_ncml_tab_widget)
+ - [pybdy.gui.selection_editor module](pybdy.gui.md#module-pybdy.gui.selection_editor)
+ - [Module contents](pybdy.gui.md#module-pybdy.gui)
+ - [pybdy.reader package](pybdy.reader.md)
+ - [Submodules](pybdy.reader.md#submodules)
+ - [pybdy.reader.directory module](pybdy.reader.md#module-pybdy.reader.directory)
+ - [pybdy.reader.factory module](pybdy.reader.md#module-pybdy.reader.factory)
+ - [pybdy.reader.ncml module](pybdy.reader.md#module-pybdy.reader.ncml)
+ - [Module contents](pybdy.reader.md#module-pybdy.reader)
+ - [pybdy.tide package](pybdy.tide.md)
+ - [Submodules](pybdy.tide.md#submodules)
+ - [pybdy.tide.fes2014_extract_HC module](pybdy.tide.md#module-pybdy.tide.fes2014_extract_HC)
+ - [pybdy.tide.nemo_bdy_tide module](pybdy.tide.md#module-pybdy.tide.nemo_bdy_tide)
+ - [pybdy.tide.nemo_bdy_tide3 module](pybdy.tide.md#module-pybdy.tide.nemo_bdy_tide3)
+ - [pybdy.tide.nemo_bdy_tide_ncgen module](pybdy.tide.md#module-pybdy.tide.nemo_bdy_tide_ncgen)
+ - [pybdy.tide.tpxo_extract_HC module](pybdy.tide.md#module-pybdy.tide.tpxo_extract_HC)
+ - [Module contents](pybdy.tide.md#module-pybdy.tide)
+ - [pybdy.utils package](pybdy.utils.md)
+ - [Submodules](pybdy.utils.md#submodules)
+ - [pybdy.utils.Constants module](pybdy.utils.md#module-pybdy.utils.Constants)
+ - [pybdy.utils.e3_to_depth module](pybdy.utils.md#module-pybdy.utils.e3_to_depth)
+ - [pybdy.utils.gcoms_break_depth module](pybdy.utils.md#module-pybdy.utils.gcoms_break_depth)
+ - [pybdy.utils.nemo_bdy_lib module](pybdy.utils.md#module-pybdy.utils.nemo_bdy_lib)
+ - [Module contents](pybdy.utils.md#module-pybdy.utils)
+ - [Submodules](pybdy.md#submodules)
+ - [pybdy.nemo_bdy_chunk module](pybdy.md#module-pybdy.nemo_bdy_chunk)
+ - [`chunk_bdy()`](pybdy.md#pybdy.nemo_bdy_chunk.chunk_bdy)
+ - [`chunk_corner()`](pybdy.md#pybdy.nemo_bdy_chunk.chunk_corner)
+ - [`chunk_land()`](pybdy.md#pybdy.nemo_bdy_chunk.chunk_land)
+ - [`chunk_large()`](pybdy.md#pybdy.nemo_bdy_chunk.chunk_large)
+ - [pybdy.nemo_bdy_dst_coord module](pybdy.md#module-pybdy.nemo_bdy_dst_coord)
+ - [`DstCoord`](pybdy.md#pybdy.nemo_bdy_dst_coord.DstCoord)
+ - [pybdy.nemo_bdy_extr_assist module](pybdy.md#module-pybdy.nemo_bdy_extr_assist)
+ - [`check_wrap()`](pybdy.md#pybdy.nemo_bdy_extr_assist.check_wrap)
+ - [`distance_weights()`](pybdy.md#pybdy.nemo_bdy_extr_assist.distance_weights)
+ - [`flood_fill()`](pybdy.md#pybdy.nemo_bdy_extr_assist.flood_fill)
+ - [`get_ind()`](pybdy.md#pybdy.nemo_bdy_extr_assist.get_ind)
+ - [`get_vertical_weights()`](pybdy.md#pybdy.nemo_bdy_extr_assist.get_vertical_weights)
+ - [`get_vertical_weights_zco()`](pybdy.md#pybdy.nemo_bdy_extr_assist.get_vertical_weights_zco)
+ - [`interp_horizontal()`](pybdy.md#pybdy.nemo_bdy_extr_assist.interp_horizontal)
+ - [`interp_vertical()`](pybdy.md#pybdy.nemo_bdy_extr_assist.interp_vertical)
+ - [`valid_index()`](pybdy.md#pybdy.nemo_bdy_extr_assist.valid_index)
+ - [pybdy.nemo_bdy_extr_tm3 module](pybdy.md#module-pybdy.nemo_bdy_extr_tm3)
+ - [`Extract`](pybdy.md#pybdy.nemo_bdy_extr_tm3.Extract)
+ - [`Extract.__init__()`](pybdy.md#pybdy.nemo_bdy_extr_tm3.Extract.__init__)
+ - [`Extract.cal_trans()`](pybdy.md#pybdy.nemo_bdy_extr_tm3.Extract.cal_trans)
+ - [`Extract.extract_month()`](pybdy.md#pybdy.nemo_bdy_extr_tm3.Extract.extract_month)
+ - [`Extract.time_delta()`](pybdy.md#pybdy.nemo_bdy_extr_tm3.Extract.time_delta)
+ - [`Extract.time_interp()`](pybdy.md#pybdy.nemo_bdy_extr_tm3.Extract.time_interp)
+ - [`Extract.write_out()`](pybdy.md#pybdy.nemo_bdy_extr_tm3.Extract.write_out)
+ - [pybdy.nemo_bdy_gen_c module](pybdy.md#module-pybdy.nemo_bdy_gen_c)
+ - [`Boundary`](pybdy.md#pybdy.nemo_bdy_gen_c.Boundary)
+ - [`Boundary.__init__()`](pybdy.md#pybdy.nemo_bdy_gen_c.Boundary.__init__)
+ - [`Boundary.fill()`](pybdy.md#pybdy.nemo_bdy_gen_c.Boundary.fill)
+ - [`Boundary.find_bdy()`](pybdy.md#pybdy.nemo_bdy_gen_c.Boundary.find_bdy)
+ - [`Boundary.remove_duplicate_points()`](pybdy.md#pybdy.nemo_bdy_gen_c.Boundary.remove_duplicate_points)
+ - [`Boundary.remove_landpoints_open_ocean()`](pybdy.md#pybdy.nemo_bdy_gen_c.Boundary.remove_landpoints_open_ocean)
+ - [`Boundary.unique_rows()`](pybdy.md#pybdy.nemo_bdy_gen_c.Boundary.unique_rows)
+ - [pybdy.nemo_bdy_grid_angle module](pybdy.md#module-pybdy.nemo_bdy_grid_angle)
+ - [`GridAngle`](pybdy.md#pybdy.nemo_bdy_grid_angle.GridAngle)
+ - [`GridAngle.__init__()`](pybdy.md#pybdy.nemo_bdy_grid_angle.GridAngle.__init__)
+ - [`GridAngle.get_lam_phi()`](pybdy.md#pybdy.nemo_bdy_grid_angle.GridAngle.get_lam_phi)
+ - [`GridAngle.get_north_dir()`](pybdy.md#pybdy.nemo_bdy_grid_angle.GridAngle.get_north_dir)
+ - [`GridAngle.get_seg_dir()`](pybdy.md#pybdy.nemo_bdy_grid_angle.GridAngle.get_seg_dir)
+ - [`GridAngle.get_sin_cos()`](pybdy.md#pybdy.nemo_bdy_grid_angle.GridAngle.get_sin_cos)
+ - [`GridAngle.trig_eq()`](pybdy.md#pybdy.nemo_bdy_grid_angle.GridAngle.trig_eq)
+ - [pybdy.nemo_bdy_ice module](pybdy.md#module-pybdy.nemo_bdy_ice)
+ - [`BoundaryIce`](pybdy.md#pybdy.nemo_bdy_ice.BoundaryIce)
+ - [`BoundaryIce.__init__()`](pybdy.md#pybdy.nemo_bdy_ice.BoundaryIce.__init__)
+ - [pybdy.nemo_bdy_ncgen module](pybdy.md#module-pybdy.nemo_bdy_ncgen)
+ - [`CreateBDYNetcdfFile()`](pybdy.md#pybdy.nemo_bdy_ncgen.CreateBDYNetcdfFile)
+ - [pybdy.nemo_bdy_ncpop module](pybdy.md#module-pybdy.nemo_bdy_ncpop)
+ - [`write_data_to_file()`](pybdy.md#pybdy.nemo_bdy_ncpop.write_data_to_file)
+ - [pybdy.nemo_bdy_scr_coord module](pybdy.md#module-pybdy.nemo_bdy_scr_coord)
+ - [`ScrCoord`](pybdy.md#pybdy.nemo_bdy_scr_coord.ScrCoord)
+ - [`ScrCoord.__init__()`](pybdy.md#pybdy.nemo_bdy_scr_coord.ScrCoord.__init__)
+ - [pybdy.nemo_bdy_setup module](pybdy.md#module-pybdy.nemo_bdy_setup)
+ - [`Setup`](pybdy.md#pybdy.nemo_bdy_setup.Setup)
+ - [`Setup.__init__()`](pybdy.md#pybdy.nemo_bdy_setup.Setup.__init__)
+ - [`Setup.refresh()`](pybdy.md#pybdy.nemo_bdy_setup.Setup.refresh)
+ - [`Setup.variable_info_reader()`](pybdy.md#pybdy.nemo_bdy_setup.Setup.variable_info_reader)
+ - [`Setup.write()`](pybdy.md#pybdy.nemo_bdy_setup.Setup.write)
+ - [`strip_comments()`](pybdy.md#pybdy.nemo_bdy_setup.strip_comments)
+ - [pybdy.nemo_bdy_source_coord module](pybdy.md#module-pybdy.nemo_bdy_source_coord)
+ - [`SourceCoord`](pybdy.md#pybdy.nemo_bdy_source_coord.SourceCoord)
+ - [`SourceCoord.__init__()`](pybdy.md#pybdy.nemo_bdy_source_coord.SourceCoord.__init__)
+ - [pybdy.nemo_bdy_zgrv2 module](pybdy.md#module-pybdy.nemo_bdy_zgrv2)
+ - [`get_bdy_depths()`](pybdy.md#pybdy.nemo_bdy_zgrv2.get_bdy_depths)
+ - [`get_bdy_depths_old()`](pybdy.md#pybdy.nemo_bdy_zgrv2.get_bdy_depths_old)
+ - [`get_bdy_sc_depths()`](pybdy.md#pybdy.nemo_bdy_zgrv2.get_bdy_sc_depths)
+ - [pybdy.nemo_coord_gen_pop module](pybdy.md#module-pybdy.nemo_coord_gen_pop)
+ - [`Coord`](pybdy.md#pybdy.nemo_coord_gen_pop.Coord)
+ - [`Coord.__init__()`](pybdy.md#pybdy.nemo_coord_gen_pop.Coord.__init__)
+ - [`Coord.add_vars()`](pybdy.md#pybdy.nemo_coord_gen_pop.Coord.add_vars)
+ - [`Coord.build_dict()`](pybdy.md#pybdy.nemo_coord_gen_pop.Coord.build_dict)
+ - [`Coord.closeme()`](pybdy.md#pybdy.nemo_coord_gen_pop.Coord.closeme)
+ - [`Coord.create_dims()`](pybdy.md#pybdy.nemo_coord_gen_pop.Coord.create_dims)
+ - [`Coord.populate()`](pybdy.md#pybdy.nemo_coord_gen_pop.Coord.populate)
+ - [`Coord.set_lenvar()`](pybdy.md#pybdy.nemo_coord_gen_pop.Coord.set_lenvar)
+ - [pybdy.profiler module](pybdy.md#module-pybdy.profiler)
+ - [`Grid`](pybdy.md#pybdy.profiler.Grid)
+ - [`Grid.__init__()`](pybdy.md#pybdy.profiler.Grid.__init__)
+ - [`process_bdy()`](pybdy.md#pybdy.profiler.process_bdy)
+ - [`write_tidal_data()`](pybdy.md#pybdy.profiler.write_tidal_data)
+ - [pybdy.pybdy_exe module](pybdy.md#module-pybdy.pybdy_exe)
+ - [`main()`](pybdy.md#pybdy.pybdy_exe.main)
+ - [pybdy.pybdy_ncml_generator module](pybdy.md#module-pybdy.pybdy_ncml_generator)
+ - [`main()`](pybdy.md#pybdy.pybdy_ncml_generator.main)
+ - [pybdy.pybdy_settings_editor module](pybdy.md#module-pybdy.pybdy_settings_editor)
+ - [`main()`](pybdy.md#pybdy.pybdy_settings_editor.main)
+ - [`open_settings_dialog()`](pybdy.md#pybdy.pybdy_settings_editor.open_settings_dialog)
+ - [`open_settings_window()`](pybdy.md#pybdy.pybdy_settings_editor.open_settings_window)
+ - [pybdy.version module](pybdy.md#module-pybdy.version)
+ - [Module contents](pybdy.md#module-pybdy)
diff --git a/docs/pybdy.gui.md b/docs/pybdy.gui.md
new file mode 100644
index 00000000..9dbc1581
--- /dev/null
+++ b/docs/pybdy.gui.md
@@ -0,0 +1,517 @@
+# pybdy.gui package
+
+## Submodules
+
+# pybdy.gui.mynormalize module
+
+## *class* pybdy.gui.mynormalize.MyNormalize(stretch='linear', exponent=5, vmid=None, vmin=None, vmax=None, clip=False)
+
+> Bases: `Normalize`
+
+A Normalize class for imshow that allows different stretching functions for astronomical images.
+
+### *method* \_\_init\_\_(stretch='linear', exponent=5, vmid=None, vmin=None, vmax=None, clip=False)
+
+Initialise an APLpyNormalize instance.
+
+> Optional Keyword Arguments
+
+> *vmin*: [ None | float ]
+> : Minimum pixel value to use for the scaling.
+
+> *vmax*: [ None | float ]
+> : Maximum pixel value to use for the scaling.
+
+> *stretch*: [ ‘linear’ | ‘log’ | ‘sqrt’ | ‘arcsinh’ | ‘power’ ]
+> : The stretch function to use (default is ‘linear’).
+
+> *vmid*: [ None | float ]
+> : Mid-pixel value used for the log and arcsinh stretches. If
+> set to None, a default value is picked.
+
+> *exponent*: [ float ]
+> : if self.stretch is set to ‘power’, this is the exponent to use.
+
+> *clip*: [ True | False ]
+> : If clip is True and the given value falls outside the range,
+> the returned value will be 0 or 1, whichever is closer.
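+
+A hypothetical usage sketch (any 2D array works in place of `data`):
+
+```python
+import numpy as np
+import matplotlib.pyplot as plt
+from pybdy.gui.mynormalize import MyNormalize
+
+data = np.random.rand(50, 50)
+norm = MyNormalize(stretch='sqrt', vmin=0.0, vmax=1.0)
+plt.imshow(data, norm=norm)
+plt.colorbar()
+plt.show()
+```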
+
+### *method* inverse(value)
+
+Maps the normalized value (i.e., index in the colormap) back to image
+data value.
+
+> ### Parameters
+
+value
+
+> : Normalized value.
+
+# pybdy.gui.nemo_bdy_input_window module
+
+Created on 21 Jan 2015.
+
+> @author: Mr. Srikanth Nagella
+
+## *class* pybdy.gui.nemo_bdy_input_window.InputWindow(setup)
+
+> Bases: `QDialog`
+
+Input Window for editing pyBDY settings.
+
+### *method* \_\_init\_\_(setup)
+
+Initialise the UI components.
+
+# pybdy.gui.nemo_bdy_mask module
+
+Mask Class to hold the mask information and operation on mask.
+
+> @author: Mr. Srikanth Nagella
+
+## *class* pybdy.gui.nemo_bdy_mask.Mask(bathymetry_file=None, mask_file=None, min_depth=200.0, shelfbreak_dist=200.0)
+
+> Bases: `object`
+
+Mask holder which reads from a netCDF bathymetry file and stores it in ‘data’ member variable.
+
+### *method* \_\_init\_\_(bathymetry_file=None, mask_file=None, min_depth=200.0, shelfbreak_dist=200.0)
+
+Initialise the Mask data.
+
+### *method* add_mask(index, roi)
+
+Add the masks for the given index values depending on the type of mask selected.
+
+### *method* apply_border_mask(pixels)
+
+Apply the mask to the given number of pixels at the border; `pixels` is the border width.
+
+### *method* apply_mediterrian_mask()
+
+Apply the Mediterranean mask specific to the test bathymetry file.
+
+### *method* fill_small_regions(index)
+
+Fill up the small regions of the selection area.
+
+### *method* mask_type *= 0*
+
+### *method* min_depth *= 200.0*
+
+### *method* remove_mask(index, roi)
+
+Remove the mask for the given index values depending on the type of mask selected.
+
+### *method* remove_small_regions(index)
+
+Remove the small regions in the selection area and take only the largest area for the mask.
+
+### *method* reset_mask()
+
+Reset the data back to no mask with land fill.
+
+### *method* save_mask(mask_file)
+
+Save the mask data to the mask file.
+
+### *method* select_the_largest_region()
+
+Tidy up the mask by selecting the largest masked region.
+
+This is to avoid two disconnected masked regions.
+
+### *method* set_bathymetry_file(bathy_file)
+
+Read the bathymetry file and set the land to 0 and ocean to 1.
+
+### *method* set_mask_file(mask_file)
+
+Read the mask data from the mask file.
+
+Assumes the mask file is 2D.
+
+### *method* set_mask_type(mask_type)
+
+Set the mask type.
+
+### *method* set_minimum_depth_mask(depth)
+
+### *method* shelfbreak_dist *= 200.0*
+
+# pybdy.gui.nemo_bdy_mask_gui module
+
+Created on 12 Jan 2015.
+
+> @author: Mr. Srikanth Nagella
+
+## *class* pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget(parent=None, mask=None, min_depth=200.0, shelfbreak_dist=200.0, \*args, \*\*kwargs)
+
+> Bases: `QWidget`
+
+QWidget class for pyBDY mask plot.
+
+### *method* \_\_init\_\_(parent=None, mask=None, min_depth=200.0, shelfbreak_dist=200.0, \*args, \*\*kwargs)
+
+Initialise the mask, matplot and the navigation toolbar.
+
+### *method* add_mask()
+
+Add the selected region in the drawing tool to the mask.
+
+### *method* apply_border_mask()
+
+Apply a mask of given number of pixels at the border of the mask.
+
+### *method* create_basemap()
+
+Draws the basemap and contour with mask information.
+
+### *method* drawing_tool_callback(toolname)
+
+Run callback for the drawing tool when the signal of change of drawing tool is received.
+
+### *method* mask_type *= 0*
+
+### *method* min_depth *= 200.0*
+
+### *method* remove_mask()
+
+Remove the selected region in the drawing tool from the mask.
+
+### *method* reset_mask()
+
+### *method* save_mask_file(mask_file)
+
+Save the mask data to mask_file.
+
+### *method* set_bathymetry_file(bathymetry_filename, mask_file)
+
+Set the bathymetry file.
+
+### *method* set_mask_settings(min_depth, shelfbreak_dist)
+
+Mask settings update.
+
+### *method* set_mask_type(type)
+
+Set the mask type.
+
+### *method* shelfbreak_dist *= 200.0*
+
+## *class* pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar(canvas, parent)
+
+> Bases: `NavigationToolbar2QT`
+
+Custom toolbar for NEMO.
+
+Includes additional buttons for the drawing tool and (add, remove) for the mask
+in addition to the default NavigationToolbar provided by matplotlib.
+
+### *method* \_\_init\_\_(canvas, parent)
+
+Initialise the toolbar.
+
+### *method* add_mask(\*dummy)
+
+Run callback for add mask button clicked.
+
+### *method* border(\*dummy)
+
+Run callback for border button clicked.
+
+### *method* drawing_tool
+
+### *method* freehand(\*dummy)
+
+Run callback for freehand button clicked.
+
+### *method* get_active_button()
+
+Return the current active button between freehand and rectangle.
+
+### *method* max_depth_mask(\*dummy)
+
+Enable the minimum height mask.
+
+### *method* normal_mask(\*dummy)
+
+Enable the normal mask button.
+
+### *method* rectangle(\*dummy)
+
+Run callback for rectangle button clicked.
+
+### *method* remove_mask(\*dummy)
+
+Run callback for remove mask button clicked.
+
+### *method* reset(\*dummy)
+
+Run callback for reset button clicked.
+
+### *method* shelf_break_mask(\*dummy)
+
+Enable the shelf break mask button.
+
+### *method* update_height_mask(btn_id)
+
+Update the height mask buttons in the interface.
+
+## pybdy.gui.nemo_bdy_mask_gui.set_icon(name)
+
+Create an icon based on the file found in the module directory with input name.
+
+# pybdy.gui.nemo_bdy_namelist_edit module
+
+Editor for namelist.bdy file.
+
+> @author: Mr. Srikanth Nagella
+
+## *class* pybdy.gui.nemo_bdy_namelist_edit.NameListEditor(setup)
+
+> Bases: `QWidget`
+
+GUI for the Namelist file options.
+
+### *method* \_\_init\_\_(setup)
+
+Initialise the GUI components using the settings.
+
+### *method* bathymetry_update
+
+### *method* combo_index_changed(value, name)
+
+Run callback for the dropdown in the settings.
+
+Run callback when the True/False dropdown for a setting with a boolean value is changed.
+
+### *method* init_ui()
+
+Initialise the UI components of the GUI.
+
+### *method* label_changed(value, name)
+
+Run callback when the text is changed in the text box.
+
+### *method* mask_settings_update
+
+### *method* mask_update
+
+### *method* new_settings *= {}*
+
+### *method* state_changed(state, name)
+
+Run callback when the check box state is changed.
+
+This updates the bool_setting.
+
+# pybdy.gui.nemo_ncml_generator module
+
+Created on 6 Aug 2015.
+
+> @author: Shirley Crompton, UK Science and Technology Facilities Council
+
+## *class* pybdy.gui.nemo_ncml_generator.Ncml_generator(basefile)
+
+> Bases: `QDialog`
+
+GUI editor to capture user input.
+
+This is done for the purpose of generating NCML representation of pybdy source datasets.
+
+### *method* \_\_init\_\_(basefile)
+
+Initialise the UI components.
+
+### *method* enable_btn_update(enable_btn)
+
+### *method* enable_tab(enable_btn)
+
+### *method* generate()
+
+### *method* generateNcML(tabsList)
+
+### *method* get_fname()
+
+### *method* get_fname_input()
+
+### *method* indent(elem, level=0)
+
+### *method* initUI()
+
+### *method* url_trawler(url, expr)
+
+# pybdy.gui.nemo_ncml_tab_widget module
+
+Created on 2 Jul 2015.
+
+> @author: Shirley Crompton, UK Science and Technology Facilities Council
+
+## *class* pybdy.gui.nemo_ncml_tab_widget.Ncml_tab(tabName)
+
+> Bases: `QWidget`
+
+Tab contents to define child aggregation.
+
+### *method* \_\_init\_\_(tabName)
+
+Initialise the UI components.
+
+### *method* add_tab()
+
+### *method* initUI()
+
+### *method* resetValues(currentValues=None)
+
+### *method* reset_tab()
+
+### *method* setWidgetStack()
+
+### *method* src_combo_changed(var_name)
+
+### *method* src_tedit_edited()
+
+## *class* pybdy.gui.nemo_ncml_tab_widget.ncml_variable(varName, old_name='')
+
+> Bases: `object`
+
+Convenient class to hold the values for an NcML variable.
+
+### *method* \_\_init\_\_(varName, old_name='')
+
+# pybdy.gui.selection_editor module
+
+Code has been taken from matplotlib polygon interaction.
+
+> @author: Mr. Srikanth Nagella
+
+## *class* pybdy.gui.selection_editor.BoxEditor(axes, canvas)
+
+> Bases: `object`
+
+Box editor for selecting an area with a rubber-band style rectangle.
+
+It uses matplotlib RectangleSelector under the hood.
+
+### *method* \_\_init\_\_(axes, canvas)
+
+Initialise the class and create a rectangle selector.
+
+### *method* disable()
+
+Disable or remove the box selector.
+
+### *method* enable()
+
+Enable the box selector.
+
+### *method* line_select_callback(eclick, erelease)
+
+Run callback for the RectangleSelector.
+
+### *method* polygon *= None*
+
+### *method* reset()
+
+Reset the Box selector.
+
+### *method* reset_polygon()
+
+Reset rectangle polygon.
+
+## *class* pybdy.gui.selection_editor.PolygonEditor(axis, canvas)
+
+> Bases: `object`
+
+Editor for the polygons drawn on the map.
+
+### *method* \_\_init\_\_(axis, canvas)
+
+Initialise the editable polygon object.
+
+### *method* add_point(xval, yval)
+
+Add a new point to the selection list and redraw the selection tool.
+
+### *method* button_press_callback(event)
+
+Run callback for the mouse press event.
+
+### *method* button_release_callback(event)
+
+Run callback for the mouse release event.
+
+### *method* delete_datapoint(event)
+
+Delete the data point under the point in event.
+
+### *method* disable()
+
+Disable the events and the selection.
+
+### *method* draw_callback(dummy_event)
+
+Draw the selection object.
+
+### *method* draw_line()
+
+Draw the line if available.
+
+### *method* draw_polygon()
+
+Draw polygon if available.
+
+### *method* enable()
+
+Enable the selection.
+
+### *method* epsilon *= 3*
+
+### *method* get_index_under_point(event)
+
+Get the index of the point under the event (mouse click).
+
+### *method* insert_datapoint(event)
+
+Insert a new data point into the closest segment of the polygon.
+
+### *method* motion_notify_callback(event)
+
+Run callback for the mouse motion with button press.
+
+This is to move the edge points of the polygon.
+
+### *method* polygon_changed(poly)
+
+Redraw the polygon.
+
+### *method* refresh()
+
+Refresh the canvas.
+
+This method looks at the list of points available and, depending on the number of
+points in the list, creates a point, a line or a polygon and draws them.
+
+### *method* reset()
+
+Reset the points in the selection deleting the line and polygon.
+
+### *method* reset_line()
+
+Reset the line, i.e. remove the line from the axes and reset it to None.
+
+### *method* reset_polygon()
+
+Reset the polygon, i.e. remove the polygon from the axis and reset it to None.
+
+### *method* set_visibility(status)
+
+Set the visibility of the selection object.
+
+### *method* show_verts *= True*
+
+## Module contents
diff --git a/docs/pybdy.md b/docs/pybdy.md
new file mode 100644
index 00000000..fcd9bc80
--- /dev/null
+++ b/docs/pybdy.md
@@ -0,0 +1,1032 @@
+# pybdy package
+
+## Subpackages
+
+- [pybdy.gui package](pybdy.gui.md)
+ - [Submodules](pybdy.gui.md#submodules)
+ - [pybdy.gui.mynormalize module](pybdy.gui.md#module-pybdy.gui.mynormalize)
+ - [`MyNormalize`](pybdy.gui.md#pybdy.gui.mynormalize.MyNormalize)
+ - [`MyNormalize.__init__()`](pybdy.gui.md#pybdy.gui.mynormalize.MyNormalize.__init__)
+ - [`MyNormalize.inverse()`](pybdy.gui.md#pybdy.gui.mynormalize.MyNormalize.inverse)
+ - [pybdy.gui.nemo_bdy_input_window module](pybdy.gui.md#module-pybdy.gui.nemo_bdy_input_window)
+ - [`InputWindow`](pybdy.gui.md#pybdy.gui.nemo_bdy_input_window.InputWindow)
+ - [`InputWindow.__init__()`](pybdy.gui.md#pybdy.gui.nemo_bdy_input_window.InputWindow.__init__)
+ - [pybdy.gui.nemo_bdy_mask module](pybdy.gui.md#module-pybdy.gui.nemo_bdy_mask)
+ - [`Mask`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask)
+ - [`Mask.__init__()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.__init__)
+ - [`Mask.add_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.add_mask)
+ - [`Mask.apply_border_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.apply_border_mask)
+ - [`Mask.apply_mediterrian_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.apply_mediterrian_mask)
+ - [`Mask.fill_small_regions()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.fill_small_regions)
+ - [`Mask.mask_type`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.mask_type)
+ - [`Mask.min_depth`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.min_depth)
+ - [`Mask.remove_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.remove_mask)
+ - [`Mask.remove_small_regions()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.remove_small_regions)
+ - [`Mask.reset_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.reset_mask)
+ - [`Mask.save_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.save_mask)
+ - [`Mask.select_the_largest_region()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.select_the_largest_region)
+ - [`Mask.set_bathymetry_file()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.set_bathymetry_file)
+ - [`Mask.set_mask_file()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.set_mask_file)
+ - [`Mask.set_mask_type()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.set_mask_type)
+ - [`Mask.set_minimum_depth_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.set_minimum_depth_mask)
+ - [`Mask.shelfbreak_dist`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask.Mask.shelfbreak_dist)
+ - [pybdy.gui.nemo_bdy_mask_gui module](pybdy.gui.md#module-pybdy.gui.nemo_bdy_mask_gui)
+ - [`MatplotlibWidget`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget)
+ - [`MatplotlibWidget.__init__()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.__init__)
+ - [`MatplotlibWidget.add_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.add_mask)
+ - [`MatplotlibWidget.apply_border_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.apply_border_mask)
+ - [`MatplotlibWidget.create_basemap()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.create_basemap)
+ - [`MatplotlibWidget.drawing_tool_callback()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.drawing_tool_callback)
+ - [`MatplotlibWidget.mask_type`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.mask_type)
+ - [`MatplotlibWidget.min_depth`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.min_depth)
+ - [`MatplotlibWidget.remove_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.remove_mask)
+ - [`MatplotlibWidget.reset_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.reset_mask)
+ - [`MatplotlibWidget.save_mask_file()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.save_mask_file)
+ - [`MatplotlibWidget.set_bathymetry_file()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.set_bathymetry_file)
+ - [`MatplotlibWidget.set_mask_settings()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.set_mask_settings)
+ - [`MatplotlibWidget.set_mask_type()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.set_mask_type)
+ - [`MatplotlibWidget.shelfbreak_dist`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.MatplotlibWidget.shelfbreak_dist)
+ - [`NemoNavigationToolbar`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar)
+ - [`NemoNavigationToolbar.__init__()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.__init__)
+ - [`NemoNavigationToolbar.add_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.add_mask)
+ - [`NemoNavigationToolbar.border()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.border)
+ - [`NemoNavigationToolbar.drawing_tool`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.drawing_tool)
+ - [`NemoNavigationToolbar.freehand()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.freehand)
+ - [`NemoNavigationToolbar.get_active_button()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.get_active_button)
+ - [`NemoNavigationToolbar.max_depth_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.max_depth_mask)
+ - [`NemoNavigationToolbar.normal_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.normal_mask)
+ - [`NemoNavigationToolbar.rectangle()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.rectangle)
+ - [`NemoNavigationToolbar.remove_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.remove_mask)
+ - [`NemoNavigationToolbar.reset()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.reset)
+ - [`NemoNavigationToolbar.shelf_break_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.shelf_break_mask)
+ - [`NemoNavigationToolbar.update_height_mask()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.NemoNavigationToolbar.update_height_mask)
+ - [`set_icon()`](pybdy.gui.md#pybdy.gui.nemo_bdy_mask_gui.set_icon)
+ - [pybdy.gui.nemo_bdy_namelist_edit module](pybdy.gui.md#module-pybdy.gui.nemo_bdy_namelist_edit)
+ - [`NameListEditor`](pybdy.gui.md#pybdy.gui.nemo_bdy_namelist_edit.NameListEditor)
+ - [`NameListEditor.__init__()`](pybdy.gui.md#pybdy.gui.nemo_bdy_namelist_edit.NameListEditor.__init__)
+ - [`NameListEditor.bathymetry_update`](pybdy.gui.md#pybdy.gui.nemo_bdy_namelist_edit.NameListEditor.bathymetry_update)
+ - [`NameListEditor.combo_index_changed()`](pybdy.gui.md#pybdy.gui.nemo_bdy_namelist_edit.NameListEditor.combo_index_changed)
+ - [`NameListEditor.init_ui()`](pybdy.gui.md#pybdy.gui.nemo_bdy_namelist_edit.NameListEditor.init_ui)
+ - [`NameListEditor.label_changed()`](pybdy.gui.md#pybdy.gui.nemo_bdy_namelist_edit.NameListEditor.label_changed)
+ - [`NameListEditor.mask_settings_update`](pybdy.gui.md#pybdy.gui.nemo_bdy_namelist_edit.NameListEditor.mask_settings_update)
+ - [`NameListEditor.mask_update`](pybdy.gui.md#pybdy.gui.nemo_bdy_namelist_edit.NameListEditor.mask_update)
+ - [`NameListEditor.new_settings`](pybdy.gui.md#pybdy.gui.nemo_bdy_namelist_edit.NameListEditor.new_settings)
+ - [`NameListEditor.state_changed()`](pybdy.gui.md#pybdy.gui.nemo_bdy_namelist_edit.NameListEditor.state_changed)
+ - [pybdy.gui.nemo_ncml_generator module](pybdy.gui.md#module-pybdy.gui.nemo_ncml_generator)
+ - [`Ncml_generator`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator)
+ - [`Ncml_generator.__init__()`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator.__init__)
+ - [`Ncml_generator.enable_btn_update()`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator.enable_btn_update)
+ - [`Ncml_generator.enable_tab()`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator.enable_tab)
+ - [`Ncml_generator.generate()`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator.generate)
+ - [`Ncml_generator.generateNcML()`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator.generateNcML)
+ - [`Ncml_generator.get_fname()`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator.get_fname)
+ - [`Ncml_generator.get_fname_input()`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator.get_fname_input)
+ - [`Ncml_generator.indent()`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator.indent)
+ - [`Ncml_generator.initUI()`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator.initUI)
+ - [`Ncml_generator.url_trawler()`](pybdy.gui.md#pybdy.gui.nemo_ncml_generator.Ncml_generator.url_trawler)
+ - [pybdy.gui.nemo_ncml_tab_widget module](pybdy.gui.md#module-pybdy.gui.nemo_ncml_tab_widget)
+ - [`Ncml_tab`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.Ncml_tab)
+ - [`Ncml_tab.__init__()`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.Ncml_tab.__init__)
+ - [`Ncml_tab.add_tab()`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.Ncml_tab.add_tab)
+ - [`Ncml_tab.initUI()`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.Ncml_tab.initUI)
+ - [`Ncml_tab.resetValues()`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.Ncml_tab.resetValues)
+ - [`Ncml_tab.reset_tab()`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.Ncml_tab.reset_tab)
+ - [`Ncml_tab.setWidgetStack()`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.Ncml_tab.setWidgetStack)
+ - [`Ncml_tab.src_combo_changed()`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.Ncml_tab.src_combo_changed)
+ - [`Ncml_tab.src_tedit_edited()`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.Ncml_tab.src_tedit_edited)
+ - [`ncml_variable`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.ncml_variable)
+ - [`ncml_variable.__init__()`](pybdy.gui.md#pybdy.gui.nemo_ncml_tab_widget.ncml_variable.__init__)
+ - [pybdy.gui.selection_editor module](pybdy.gui.md#module-pybdy.gui.selection_editor)
+ - [`BoxEditor`](pybdy.gui.md#pybdy.gui.selection_editor.BoxEditor)
+ - [`BoxEditor.__init__()`](pybdy.gui.md#pybdy.gui.selection_editor.BoxEditor.__init__)
+ - [`BoxEditor.disable()`](pybdy.gui.md#pybdy.gui.selection_editor.BoxEditor.disable)
+ - [`BoxEditor.enable()`](pybdy.gui.md#pybdy.gui.selection_editor.BoxEditor.enable)
+ - [`BoxEditor.line_select_callback()`](pybdy.gui.md#pybdy.gui.selection_editor.BoxEditor.line_select_callback)
+ - [`BoxEditor.polygon`](pybdy.gui.md#pybdy.gui.selection_editor.BoxEditor.polygon)
+ - [`BoxEditor.reset()`](pybdy.gui.md#pybdy.gui.selection_editor.BoxEditor.reset)
+ - [`BoxEditor.reset_polygon()`](pybdy.gui.md#pybdy.gui.selection_editor.BoxEditor.reset_polygon)
+ - [`PolygonEditor`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor)
+ - [`PolygonEditor.__init__()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.__init__)
+ - [`PolygonEditor.add_point()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.add_point)
+ - [`PolygonEditor.button_press_callback()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.button_press_callback)
+ - [`PolygonEditor.button_release_callback()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.button_release_callback)
+ - [`PolygonEditor.delete_datapoint()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.delete_datapoint)
+ - [`PolygonEditor.disable()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.disable)
+ - [`PolygonEditor.draw_callback()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.draw_callback)
+ - [`PolygonEditor.draw_line()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.draw_line)
+ - [`PolygonEditor.draw_polygon()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.draw_polygon)
+ - [`PolygonEditor.enable()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.enable)
+ - [`PolygonEditor.epsilon`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.epsilon)
+ - [`PolygonEditor.get_index_under_point()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.get_index_under_point)
+ - [`PolygonEditor.insert_datapoint()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.insert_datapoint)
+ - [`PolygonEditor.motion_notify_callback()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.motion_notify_callback)
+ - [`PolygonEditor.polygon_changed()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.polygon_changed)
+ - [`PolygonEditor.refresh()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.refresh)
+ - [`PolygonEditor.reset()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.reset)
+ - [`PolygonEditor.reset_line()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.reset_line)
+ - [`PolygonEditor.reset_polygon()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.reset_polygon)
+ - [`PolygonEditor.set_visibility()`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.set_visibility)
+ - [`PolygonEditor.show_verts`](pybdy.gui.md#pybdy.gui.selection_editor.PolygonEditor.show_verts)
+ - [Module contents](pybdy.gui.md#module-pybdy.gui)
+- [pybdy.reader package](pybdy.reader.md)
+ - [Submodules](pybdy.reader.md#submodules)
+ - [pybdy.reader.directory module](pybdy.reader.md#module-pybdy.reader.directory)
+ - [`GridGroup`](pybdy.reader.md#pybdy.reader.directory.GridGroup)
+ - [`GridGroup.__init__()`](pybdy.reader.md#pybdy.reader.directory.GridGroup.__init__)
+ - [`GridGroup.get_meta_data()`](pybdy.reader.md#pybdy.reader.directory.GridGroup.get_meta_data)
+ - [`Reader`](pybdy.reader.md#pybdy.reader.directory.Reader)
+ - [`Reader.__init__()`](pybdy.reader.md#pybdy.reader.directory.Reader.__init__)
+ - [`Reader.calculate_time_interval()`](pybdy.reader.md#pybdy.reader.directory.Reader.calculate_time_interval)
+ - [`Reader.delta_time_interval()`](pybdy.reader.md#pybdy.reader.directory.Reader.delta_time_interval)
+ - [`Reader.get_dir_list()`](pybdy.reader.md#pybdy.reader.directory.Reader.get_dir_list)
+ - [`Reader.get_source_timedata()`](pybdy.reader.md#pybdy.reader.directory.Reader.get_source_timedata)
+ - [`Reader.grid_type_list`](pybdy.reader.md#pybdy.reader.directory.Reader.grid_type_list)
+ - [`Variable`](pybdy.reader.md#pybdy.reader.directory.Variable)
+ - [`Variable.__init__()`](pybdy.reader.md#pybdy.reader.directory.Variable.__init__)
+ - [`Variable.get_attribute_values()`](pybdy.reader.md#pybdy.reader.directory.Variable.get_attribute_values)
+ - [`Variable.get_dimensions()`](pybdy.reader.md#pybdy.reader.directory.Variable.get_dimensions)
+ - [`Variable.set_time_dimension_index()`](pybdy.reader.md#pybdy.reader.directory.Variable.set_time_dimension_index)
+ - [`Variable.time_counter_const`](pybdy.reader.md#pybdy.reader.directory.Variable.time_counter_const)
+ - [pybdy.reader.factory module](pybdy.reader.md#module-pybdy.reader.factory)
+ - [`GetFile()`](pybdy.reader.md#pybdy.reader.factory.GetFile)
+ - [`GetReader()`](pybdy.reader.md#pybdy.reader.factory.GetReader)
+ - [`NetCDFFile`](pybdy.reader.md#pybdy.reader.factory.NetCDFFile)
+ - [`NetCDFFile.__init__()`](pybdy.reader.md#pybdy.reader.factory.NetCDFFile.__init__)
+ - [`NetCDFFile.close()`](pybdy.reader.md#pybdy.reader.factory.NetCDFFile.close)
+ - [pybdy.reader.ncml module](pybdy.reader.md#module-pybdy.reader.ncml)
+ - [`GridGroup`](pybdy.reader.md#pybdy.reader.ncml.GridGroup)
+ - [`GridGroup.__init__()`](pybdy.reader.md#pybdy.reader.ncml.GridGroup.__init__)
+ - [`GridGroup.get_meta_data()`](pybdy.reader.md#pybdy.reader.ncml.GridGroup.get_meta_data)
+ - [`GridGroup.logger`](pybdy.reader.md#pybdy.reader.ncml.GridGroup.logger)
+ - [`GridGroup.update_atrributes()`](pybdy.reader.md#pybdy.reader.ncml.GridGroup.update_atrributes)
+ - [`NcMLFile`](pybdy.reader.md#pybdy.reader.ncml.NcMLFile)
+ - [`NcMLFile.__init__()`](pybdy.reader.md#pybdy.reader.ncml.NcMLFile.__init__)
+ - [`NcMLFile.close()`](pybdy.reader.md#pybdy.reader.ncml.NcMLFile.close)
+ - [`Reader`](pybdy.reader.md#pybdy.reader.ncml.Reader)
+ - [`Reader.__init__()`](pybdy.reader.md#pybdy.reader.ncml.Reader.__init__)
+ - [`Reader.close()`](pybdy.reader.md#pybdy.reader.ncml.Reader.close)
+ - [`Reader.grid_type_list`](pybdy.reader.md#pybdy.reader.ncml.Reader.grid_type_list)
+ - [`Reader.time_counter`](pybdy.reader.md#pybdy.reader.ncml.Reader.time_counter)
+ - [`Variable`](pybdy.reader.md#pybdy.reader.ncml.Variable)
+ - [`Variable.__init__()`](pybdy.reader.md#pybdy.reader.ncml.Variable.__init__)
+ - [`Variable.get_attribute_value()`](pybdy.reader.md#pybdy.reader.ncml.Variable.get_attribute_value)
+ - [`init_jnius()`](pybdy.reader.md#pybdy.reader.ncml.init_jnius)
+ - [Module contents](pybdy.reader.md#module-pybdy.reader)
+- [pybdy.tide package](pybdy.tide.md)
+ - [Submodules](pybdy.tide.md#submodules)
+ - [pybdy.tide.fes2014_extract_HC module](pybdy.tide.md#module-pybdy.tide.fes2014_extract_HC)
+ - [`FesExtract`](pybdy.tide.md#pybdy.tide.fes2014_extract_HC.FesExtract)
+ - [`FesExtract.__init__()`](pybdy.tide.md#pybdy.tide.fes2014_extract_HC.FesExtract.__init__)
+ - [`FesExtract.interpolate_constituents()`](pybdy.tide.md#pybdy.tide.fes2014_extract_HC.FesExtract.interpolate_constituents)
+ - [`bilinear_interpolation()`](pybdy.tide.md#pybdy.tide.fes2014_extract_HC.bilinear_interpolation)
+ - [`interpolate_data()`](pybdy.tide.md#pybdy.tide.fes2014_extract_HC.interpolate_data)
+ - [pybdy.tide.nemo_bdy_tide module](pybdy.tide.md#module-pybdy.tide.nemo_bdy_tide)
+ - [`Extract`](pybdy.tide.md#pybdy.tide.nemo_bdy_tide.Extract)
+ - [`Extract.__init__()`](pybdy.tide.md#pybdy.tide.nemo_bdy_tide.Extract.__init__)
+ - [`Extract.extract_con()`](pybdy.tide.md#pybdy.tide.nemo_bdy_tide.Extract.extract_con)
+ - [pybdy.tide.nemo_bdy_tide3 module](pybdy.tide.md#module-pybdy.tide.nemo_bdy_tide3)
+ - [`constituents_index()`](pybdy.tide.md#pybdy.tide.nemo_bdy_tide3.constituents_index)
+ - [`nemo_bdy_tide_rot()`](pybdy.tide.md#pybdy.tide.nemo_bdy_tide3.nemo_bdy_tide_rot)
+ - [pybdy.tide.nemo_bdy_tide_ncgen module](pybdy.tide.md#module-pybdy.tide.nemo_bdy_tide_ncgen)
+ - [`CreateBDYTideNetcdfFile()`](pybdy.tide.md#pybdy.tide.nemo_bdy_tide_ncgen.CreateBDYTideNetcdfFile)
+ - [pybdy.tide.tpxo_extract_HC module](pybdy.tide.md#module-pybdy.tide.tpxo_extract_HC)
+ - [`TpxoExtract`](pybdy.tide.md#pybdy.tide.tpxo_extract_HC.TpxoExtract)
+ - [`TpxoExtract.__init__()`](pybdy.tide.md#pybdy.tide.tpxo_extract_HC.TpxoExtract.__init__)
+ - [`TpxoExtract.generate_landmask_from_bathymetry()`](pybdy.tide.md#pybdy.tide.tpxo_extract_HC.TpxoExtract.generate_landmask_from_bathymetry)
+ - [`TpxoExtract.interpolate_constituents()`](pybdy.tide.md#pybdy.tide.tpxo_extract_HC.TpxoExtract.interpolate_constituents)
+ - [`bilinear_interpolation()`](pybdy.tide.md#pybdy.tide.tpxo_extract_HC.bilinear_interpolation)
+ - [`interpolate_data()`](pybdy.tide.md#pybdy.tide.tpxo_extract_HC.interpolate_data)
+ - [Module contents](pybdy.tide.md#module-pybdy.tide)
+- [pybdy.utils package](pybdy.utils.md)
+ - [Submodules](pybdy.utils.md#submodules)
+ - [pybdy.utils.Constants module](pybdy.utils.md#module-pybdy.utils.Constants)
+ - [pybdy.utils.e3_to_depth module](pybdy.utils.md#module-pybdy.utils.e3_to_depth)
+ - [`e3_to_depth()`](pybdy.utils.md#pybdy.utils.e3_to_depth.e3_to_depth)
+ - [pybdy.utils.gcoms_break_depth module](pybdy.utils.md#module-pybdy.utils.gcoms_break_depth)
+ - [`gcoms_boundary_masks()`](pybdy.utils.md#pybdy.utils.gcoms_break_depth.gcoms_boundary_masks)
+ - [`gcoms_break_depth()`](pybdy.utils.md#pybdy.utils.gcoms_break_depth.gcoms_break_depth)
+ - [`polcoms_select_domain()`](pybdy.utils.md#pybdy.utils.gcoms_break_depth.polcoms_select_domain)
+ - [pybdy.utils.nemo_bdy_lib module](pybdy.utils.md#module-pybdy.utils.nemo_bdy_lib)
+ - [`bdy_sections()`](pybdy.utils.md#pybdy.utils.nemo_bdy_lib.bdy_sections)
+ - [`bdy_transport()`](pybdy.utils.md#pybdy.utils.nemo_bdy_lib.bdy_transport)
+ - [`dist()`](pybdy.utils.md#pybdy.utils.nemo_bdy_lib.dist)
+ - [`dist_point_to_segment()`](pybdy.utils.md#pybdy.utils.nemo_bdy_lib.dist_point_to_segment)
+ - [`get_output_filename()`](pybdy.utils.md#pybdy.utils.nemo_bdy_lib.get_output_filename)
+ - [`get_output_tidal_filename()`](pybdy.utils.md#pybdy.utils.nemo_bdy_lib.get_output_tidal_filename)
+ - [`psi_field()`](pybdy.utils.md#pybdy.utils.nemo_bdy_lib.psi_field)
+ - [`rot_rep()`](pybdy.utils.md#pybdy.utils.nemo_bdy_lib.rot_rep)
+ - [`sub2ind()`](pybdy.utils.md#pybdy.utils.nemo_bdy_lib.sub2ind)
+ - [`velocity_field()`](pybdy.utils.md#pybdy.utils.nemo_bdy_lib.velocity_field)
+ - [Module contents](pybdy.utils.md#module-pybdy.utils)
+
+## Submodules
+
+# pybdy.nemo_bdy_chunk module
+
+> Created on Thu Dec 19 10:39:46 2024.
+
+@author James Harle
+@author Benjamin Barton
+
+## pybdy.nemo_bdy_chunk.chunk_bdy(bdy)
+
+Master chunking function.
+
+Takes the boundary indices and turns them into a list of boundary chunks.
+The boundary is first split at natural breaks like land or the east-west wrap.
+The chunks are then split near corners.
+The chunks are then optionally split in the middle if they’re above a certain size
+after attempting to split at corners.
+
+> ### Parameters
+
+> bdy (obj) : organised as bdy_i[point, i/j grid] and rim width bdy_r[point]
+> logger : log error and messages
+
+> ### Returns
+
+> chunk_number (numpy.array) : array of chunk numbers
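+
+A hypothetical call (a sketch, assuming `bdy` is any object exposing `bdy_i`, `bdy_r` and a `logger` as described above):
+
+```python
+import logging
+from types import SimpleNamespace
+
+import numpy as np
+from pybdy.nemo_bdy_chunk import chunk_bdy
+
+bdy = SimpleNamespace(
+    bdy_i=np.array([[10, 5], [11, 5], [12, 5], [13, 5]]),  # [point, i/j]
+    bdy_r=np.zeros(4, dtype=int),                          # rim index per point
+    logger=logging.getLogger('pybdy'),
+)
+chunk_number = chunk_bdy(bdy)  # one chunk number per boundary point
+```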
+
+## pybdy.nemo_bdy_chunk.chunk_corner(ibdy, jbdy, rbdy, chunk_number, rw)
+
+Find corners and split along the change in direction.
+
+To do this we look for a change in direction along each rim.
+
+> ### Parameters
+
+> ibdy (numpy.array) : index in i direction
+> jbdy (numpy.array) : index in j direction
+> rbdy (numpy.array) : rim index
+> chunk_number (numpy.array) : array of chunk numbers. -1 means an unassigned chunk number
+> rw (int) : rimwidth
+
+> ### Returns
+
+> chunk_number (numpy.array) : array of chunk numbers
+
+## pybdy.nemo_bdy_chunk.chunk_land(ibdy, jbdy, chunk_number, rw)
+
+Find natural breaks in the boundary by looking for gaps in i and j.
+
+> ### Parameters
+
+> ibdy (numpy.array) : index in i direction
+> jbdy (numpy.array) : index in j direction
+> chunk_number (numpy.array) : array of chunk numbers. -1 means an unassigned chunk number
+> rw (int) : rimwidth
+
+> ### Returns
+
+> chunk_number (numpy.array) : array of chunk numbers
+
+## pybdy.nemo_bdy_chunk.chunk_large(ibdy, jbdy, chunk_number)
+
+Split boundaries that have too much white space and are too large.
+
+> ### Parameters
+
+> ibdy (numpy.array) : index in i direction
+> jbdy (numpy.array) : index in j direction
+> chunk_number (numpy.array) : array of chunk numbers. -1 means an unassigned chunk number
+
+> ### Returns
+
+> chunk_number (numpy.array) : array of chunk numbers
+
+# pybdy.nemo_bdy_dst_coord module
+
+## *class* pybdy.nemo_bdy_dst_coord.DstCoord
+
+> Bases: `object`
+
+Object is currently empty and has data bound to it externally.
+
+Equivalent to Matlab dst_coord.
+
+# pybdy.nemo_bdy_extr_assist module
+
+> Created on Thu Dec 21 17:34:00 2024.
+
+@author James Harle
+@author Benjamin Barton
+
+## pybdy.nemo_bdy_extr_assist.check_wrap(imin, imax, sc_lon)
+
+Check if source domain wraps and dst spans the wrap.
+
+> ### Parameters
+
+> imin (int) : minimum i index
+> imax (int) : maximum i index
+> sc_lon (np.array) : the longitude of the source grid
+
+> ### Returns
+
+> wrap_flag (bool) : if true the sc wraps and dst spans wrap
+
+## pybdy.nemo_bdy_extr_assist.distance_weights(sc_bdy, dist_tot, sc_z_len, r0, logger)
+
+Find the distance weightings for averaging source data to destination.
+
+> ### Parameters
+
+> sc_bdy (numpy.array) : source data
+> dist_tot (numpy.array) : distance from dst point to 9 nearest sc points
+> sc_z_len (int) : the number of depth levels
+> r0 (float) : correlation distance
+> logger : log of statements
+
+> ### Returns
+
+> dist_wei (numpy.array) : weightings for averaging
+> dist_fac (numpy.array) : total weighting factor
+
+## pybdy.nemo_bdy_extr_assist.flood_fill(sc_bdy, isslab, logger)
+
+Fill the data horizontally then downwards to remove nans before interpolation.
+
+> ### Parameters
+
+> sc_bdy (np.array) : source data [nz_sc, nbdy, 9]
+> isslab (bool) : if true data has vertical cells for vertical flood fill
+> logger : log of statements
+
+> ### Returns
+
+> sc_bdy (np.array) : source data [nz_sc, nbdy, 9]
+
+## pybdy.nemo_bdy_extr_assist.get_ind(dst_lon, dst_lat, sc_lon, sc_lat)
+
+Calculate the min and max indices for data extraction.
+
+> ### Parameters
+
+> dst_lon (np.array) : the longitude of the destination grid
+> dst_lat (np.array) : the latitude of the destination grid
+> sc_lon (np.array) : the longitude of the source grid
+> sc_lat (np.array) : the latitude of the source grid
+
+> ### Returns
+
+> imin (int) : minimum i index
+> imax (int) : maximum i index
+> jmin (int) : minimum j index
+> jmax (int) : maximum j index
+
+## pybdy.nemo_bdy_extr_assist.get_vertical_weights(dst_dep, dst_len_z, num_bdy, sc_z, sc_z_len, ind, zco)
+
+Determine 3D depth vertical weights for the linear interpolation onto Dst grid.
+
+Selects 9 source points horizontally around a destination grid point.
+Calculated the distance from each source point to the destination to
+be used in weightings. The resulting arrays are [nz * nbdy * 9, 2].
+
+> ### Parameters
+
+> dst_dep (np.array) : the depth of the destination grid chunk [nz, nbdy]
+> dst_len_z (int) : the length of depth axis of the destination grid
+> num_bdy (int) : number of boundary points in chunk
+> sc_z (np.array) : the depth of the source grid [k, j, i]
+> sc_z_len (int) : the length of depth axis of the source grid
+> ind (np.array) : indices of bdy and 9 nearest neighbours flattened “F” j,i [nbdy, 9]
+> zco (bool) : if True z levels are not spatially varying
+
+> ### Returns
+
+> z9_dist (np.array) : the distance weights of the selected points
+> z9_ind (np.array) : the indices of the sc depth above and below bdy
+
+## pybdy.nemo_bdy_extr_assist.get_vertical_weights_zco(dst_dep, dst_len_z, num_bdy, sc_z, sc_z_len)
+
+Determine vertical weights for the linear interpolation onto Dst grid.
+
+Calculates the vertical distance from each source point to the destination to
+be used in weightings. The resulting arrays are [nbdy * nz, 2].
+
+> Note: z_dist and z_ind are [nbdy\*nz, 2] where [:, 0] is the nearest vertical index
+> and [:, 1] is the index above or below i.e. the vertical index -1 for sc_z > dst_z
+> and vertical index +1 for sc_z \<= dst_z
+
+> ### Parameters
+
+> dst_dep (np.array) : the depth of the destination grid chunk [nz, nbdy]
+> dst_len_z (int) : the length of depth axis of the destination grid
+> num_bdy (int) : number of boundary points in chunk
+> sc_z (np.array) : the depth of the source grid [k, j, i]
+> sc_z_len (int) : the length of depth axis of the source grid
+
+> ### Returns
+
+> z_dist (np.array) : the distance weights of the selected points
+> z_ind (np.array) : the indices of the sc depth above and below bdy
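+
+A toy illustration of this `[:, 0]`/`[:, 1]` convention (plain numpy, independent of pyBDY):
+
+```python
+import numpy as np
+
+sc_z = np.array([0.0, 10.0, 20.0, 40.0])  # source depths of one column (m)
+dst_z = 12.0                              # destination depth (m)
+
+k = int(np.argmin(np.abs(sc_z - dst_z)))  # nearest vertical index -> column 0
+k2 = k - 1 if sc_z[k] > dst_z else k + 1  # index above or below -> column 1
+print(k, k2)  # 1 2: interpolate between the 10 m and 20 m levels
+```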
+
+## pybdy.nemo_bdy_extr_assist.interp_horizontal(sc_bdy, dist_wei, dist_fac, logger)
+
+Interpolate the source data to the destination grid using weighted average.
+
+> ### Parameters
+
+> sc_bdy (numpy.array) : source data
+> dist_wei (numpy.array) : weightings for interpolation
+> dist_fac (numpy.array) : sum of weightings
+> logger : log of statements
+
+> ### Returns
+
+> dst_bdy (numpy.array) : destination bdy points with data from source grid
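+
+The underlying arithmetic is a weighted mean over the 9 nearest source points; a standalone numpy sketch of that step (an illustration, not the function's exact internals):
+
+```python
+import numpy as np
+
+sc_bdy = np.random.rand(3, 4, 9)    # source data [nz, nbdy, 9]
+dist_wei = np.random.rand(3, 4, 9)  # weightings for interpolation
+dist_fac = dist_wei.sum(axis=2)     # sum of weightings
+
+dst_bdy = (sc_bdy * dist_wei).sum(axis=2) / dist_fac  # weighted average [nz, nbdy]
+```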
+
+## pybdy.nemo_bdy_extr_assist.interp_vertical(sc_bdy, dst_dep, bdy_bathy, z_ind, z_dist, num_bdy, zinterp=True)
+
+Interpolate source data onto destination vertical levels.
+
+> ### Parameters
+
+> sc_bdy (np.array) : source data [nz_sc, nbdy, 9]
+> dst_dep (np.array) : the depth of the destination grid chunk [nz, nbdy]
+> bdy_bathy (np.array): the destination grid bdy points bathymetry
+> z_ind (np.array) : the indices of the sc depth above and below bdy
+> z_dist (np.array) : the distance weights of the selected points
+> num_bdy (int) : number of boundary points in chunk
+> zinterp (bool) : vertical interpolation flag
+
+> ### Returns
+
+> data_out (np.array) : source data on destination depth levels
+
+## pybdy.nemo_bdy_extr_assist.valid_index(sc_bdy, logger)
+
+Find an array of valid indices.
+
+> ### Parameters
+
+> sc_bdy (numpy.array) : source data
+> logger : log of statements
+
+> ### Returns
+
+> data_ind (numpy.array) : indices of max depth of valid data
+> nan_ind (numpy.array) : indices where source is land
+
+# pybdy.nemo_bdy_extr_tm3 module
+
+> Created on Wed Sep 12 08:02:46 2012.
+
+This module defines the extraction of the data from the source grid and does
+the interpolation onto the destination grid.
+
+@author James Harle
+@author John Kazimierz Farey
+
+> @author: Mr. Srikanth Nagella
+
+## *class* pybdy.nemo_bdy_extr_tm3.Extract(setup, SourceCoord, DstCoord, Grid, var_nam, grd, pair)
+
+> Bases: `object`
+
+### *method* \_\_init\_\_(setup, SourceCoord, DstCoord, Grid, var_nam, grd, pair)
+
+Initialise the Extract object.
+
+Parent grid to child grid weights are defined along with rotation
+weightings for vector quantities.
+
+> ### Parameters
+
+> setup (list) : settings for bdy
+> SourceCoord (obj) : source grid information
+> DstCoord (obj) : destination grid information
+> Grid (dict) : containing grid type ‘t’, ‘u’, ‘v’ and source time
+> var_name (list) : netcdf file variable names (str)
+> years (list) : years to extract (default [1979])
+> months (list) : months to extract (default [11])
+
+> ### Returns
+
+> Extract (obj) : Object with indexing arrays and weightings ready for interpolation
+
+### *method* cal_trans(source, dest, year, month)
+
+Translate between calendars and return scale factor and number of days in month.
+
+> ### Parameters
+
+> source : source calendar
+> dest : destination calendar
+> year : input year
+> month : input month
+
+> ### Returns
+
+> sf : scale factor
+> ed : number of days in month
+
+### *method* extract_month(year, month)
+
+Extract monthly data and interpolate onto the destination grid.
+
+> ### Parameters
+
+> year : year of data to be extracted
+> month : month of the year to be extracted
+
+> ### Returns
+
+> self.data_out : data from source on bdy locations and depths
+
+### *method* time_delta(time_counter)
+
+Get time delta and number of time steps per day.
+
+Calculates the difference between time steps for time_counter, checks these
+are uniform and then retrieves the number of time steps per day.
+
+> ### Parameters
+
+> time_counter : model time coordinate
+
+> ### Returns
+
+> deltaT : length of time step
+> dstep : number of time steps per day
+
+### *method* time_interp(year, month)
+
+Perform a time interpolation of the BDY data to daily frequency.
+
+This method performs a time interpolation (if required). This is
+necessary if the time frequency is not a factor of monthly output or the
+input and output calendars differ. CF compliant calendar options accepted:
+gregorian | standard, proleptic_gregorian, noleap | 365_day, 360_day or julian.
+
+### *method* write_out(year, month, ind, unit_origin)
+
+Write monthly BDY data to a netCDF file.
+
+This method writes out all available variables for a given grid along with
+any associated metadata. Currently data are only written out as monthly
+files.
+
+> ### Parameters
+
+> year (int) : year to write out
+> month (int) : month to write out
+> ind (dict): dictionary holding grid information
+> unit_origin (str) : time reference ‘%d 00:00:00’ %date_origin
+
+> ### Returns
+
+> None
+
+# pybdy.nemo_bdy_gen_c module
+
+NEMO Boundary module.
+
+Creates indices for t, u and v points, plus rim gradient.
+The variable names have been renamed for consistency with Python standards and to
+generalise them, e.g. bdy_i is used instead of bdy_t.
+
+Ported from Matlab code by James Harle
+
+> @author: John Kazimierz Farey
+> @author: Srikanth Nagella.
+
+## *class* pybdy.nemo_bdy_gen_c.Boundary(boundary_mask, settings, grid)
+
+> Bases: `object`
+
+Class for boundary definitions.
+
+### *method* \_\_init\_\_(boundary_mask, settings, grid)
+
+Generate the indices for the NEMO boundary and return a Grid object with indices.
+
+> ### Parameters
+
+> boundary_mask : boundary mask
+> settings : dictionary of setting values
+> grid : type of the grid ‘t’, ‘u’, ‘v’
+
+> ### Returns
+
+> Boundary (object) : where bdy_i is index and bdy_r is the r index
+
+### *method* fill(mask, ref, brg)
+
+### *method* find_bdy(igrid, jgrid, mask, brg)
+
+Find the border indexes by checking the change from ocean to land.
+
+> Returns the i and j index array where the shift happens.
+
+> ### Parameters
+
+> igrid : I x direction indexes
+> jgrid : J y direction indexes
+> mask : mask data
+> brg : mask index range
+
+> ### Returns
+
+> bdy_i : bdy indexes
+> bdy_r : bdy rim values.
+
+### *method* remove_duplicate_points(bdy_i, bdy_r)
+
+Remove the duplicate points in the bdy_i and return the bdy_i and bdy_r.
+
+> ### Parameters
+
+> bdy_i : bdy indexes
+> bdy_r : bdy rim values.
+
+> ### Returns
+
+> bdy_i : bdy indexes
+> bdy_r : bdy rim values.
+
+### *method* remove_landpoints_open_ocean(mask, bdy_i, bdy_r)
+
+Remove the land points and open ocean points.
+
+### *method* unique_rows(t)
+
+Find indexes of unique rows in the input 2D array.
+
+> ### Parameters
+
+> t : input 2D array.
+
+> ### Returns
+
+> indx : indexes of unique rows
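+
+The behaviour can be pictured with the plain numpy equivalent (shown for illustration; an assumption, not necessarily the method's implementation):
+
+```python
+import numpy as np
+
+t = np.array([[1, 2], [3, 4], [1, 2]])
+_, indx = np.unique(t, axis=0, return_index=True)
+print(np.sort(indx))  # [0 1]: rows 0 and 1 are the unique rows
+```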
+
+# pybdy.nemo_bdy_grid_angle module
+
+## *class* pybdy.nemo_bdy_grid_angle.GridAngle(hgr, imin, imax, jmin, jmax, cd_type)
+
+> Bases: `object`
+
+Class to get orientation of grid from I and J offsets for different grid types.
+
+### *method* \_\_init\_\_(hgr, imin, imax, jmin, jmax, cd_type)
+
+Get sin and cosine fields for the grid angle from North.
+
+> ### Parameters
+
+> hgr : grid object
+> imin : minimum model zonal indices
+> imax : maximum model zonal indices
+> jmin : minimum model meridional indices
+> jmax : maximum model meridional indices
+> cd_type: define the nature of pt2d grid points
+
+> ### Returns
+
+> None : object
+
+### *method* get_lam_phi(map=False, i=0, j=0, single=False)
+
+Get lam/phi in (offset) i/j range for init grid type.
+
+Data must be converted to float64 to prevent loss of precision in later results.
+
+### *method* get_north_dir()
+
+Find North pole direction and modulus of some point.
+
+### *method* get_seg_dir(north_n)
+
+Find segmentation direction of some point.
+
+### *method* get_sin_cos(nx, ny, nn, sx, sy, sn)
+
+Get sin and cos from lat and lon using scalar/vector products.
+
+### *method* trig_eq(x, eq, z_one, z_two)
+
+Calculate a long-winded equation of two variables; some lam and phi.
+
+# pybdy.nemo_bdy_ice module
+
+## *class* pybdy.nemo_bdy_ice.BoundaryIce
+
+> Bases: `object`
+
+### *method* \_\_init\_\_()
+
+# pybdy.nemo_bdy_ncgen module
+
+Create a Nemo Bdy netCDF file ready for population.
+
+Written by John Kazimierz Farey, started August 30, 2012
+Port of Matlab code of James Harle
+
+## pybdy.nemo_bdy_ncgen.CreateBDYNetcdfFile(filename, xb_len, x_len, y_len, depth_len, rw, h, orig, fv, calendar, grd)
+
+Create a template of bdy netCDF files, common for the T, I, U, V and E grid types.
+
+# pybdy.nemo_bdy_ncpop module
+
+Created on 3 Oct 2014.
+
+> @author: Mr. Srikanth Nagella
+> Netcdf writer for the bdy output
+
+## pybdy.nemo_bdy_ncpop.write_data_to_file(filename, variable_name, data)
+
+Write the data to the netcdf template file.
+
+> ### Parameters
+
+filename – output filename
+variable_name – variable name into which the data is written.
+data – data that will be written to the variable in the netcdf file.
+
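+For example (the file and variable names below are hypothetical; the
+template file must already exist, see pybdy.nemo_bdy_ncgen):
+
+```python
+import numpy as np
+from pybdy.nemo_bdy_ncpop import write_data_to_file
+
+# Write a block of temperature data into the pre-built template file.
+data = np.zeros((1, 75, 100))  # illustrative shape only
+write_data_to_file("NNA_R12_bdyT_y1979m11.nc", "votemper", data)
+```
+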
+# pybdy.nemo_bdy_scr_coord module
+
+## *class* pybdy.nemo_bdy_scr_coord.ScrCoord
+
+> Bases: `object`
+
+### *method* \_\_init\_\_()
+
+# pybdy.nemo_bdy_setup module
+
+> Created on Wed Sep 12 08:02:46 2012.
+
+Parses a file to find out which nemo boundary settings to use
+
+@author John Kazimierz Farey
+@author James Harle
+
+## *class* pybdy.nemo_bdy_setup.Setup(setfile)
+
+> Bases: `object`
+
+Invoke with a text file location; class init reads and deciphers variables.
+
+This class holds the settings information.
+
+### *method* \_\_init\_\_(setfile)
+
+Set up the constructor.
+
+This constructor reads the settings file and populates the dictionary with
+each setting name/key and its value.
+
+> ### Parameters
+
+> setfile (str) : settings file
+
+### *method* refresh()
+
+Reload the settings from file.
+
+### *method* variable_info_reader(filename)
+
+Read the variable description data from the ‘variable.info’ file.
+
+This method reads the variable description data from the ‘variable.info’ file
+in the pybdy installation path if it cannot find a file with the same
+name as the input bdy file but with the extension .info.
+
+> ### Parameters
+
+filename – filename of the variables information
+
+> ### Returns
+
+> variable_info (dict) : dictionary of variable names and their descriptions
+
+### *method* write()
+
+Write the variable data back into the file.
+
+## pybdy.nemo_bdy_setup.strip_comments(line)
+
+Strip comments from the line; removes text after ‘!’.
+
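+For example (a sketch of the expected behaviour):
+
+```python
+from pybdy.nemo_bdy_setup import strip_comments
+
+line = "rn_hmin = -10 ! min depth of the ocean"
+print(strip_comments(line))  # -> "rn_hmin = -10" (text after '!' dropped)
+```
+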
+# pybdy.nemo_bdy_source_coord module
+
+## *class* pybdy.nemo_bdy_source_coord.SourceCoord
+
+> Bases: `object`
+
+### *method* \_\_init\_\_()
+
+Initialise the source coordinates attributes of the object.
+
+# pybdy.nemo_bdy_zgrv2 module
+
+Created.
+
+@author John Kazimierz Farey
+@author Benjamin Barton.
+
+## pybdy.nemo_bdy_zgrv2.get_bdy_depths(DstCoord, bdy_i, grd)
+
+Depth levels from the nearest neighbour on the source grid.
+
+> ### Parameters
+
+> DstCoord (object) : Object containing destination grid info
+> bdy_i (np.array) : indices of the i, j bdy points [bdy, 2]
+> grd (str) : grid type t, u, v
+
+> ### Returns
+
+> bdy_tz (array) : sc depths on bdy points on t levels
+> bdy_wz (array) : sc depths on bdy points on w levels
+> bdy_e3 (array) : sc level thickness on bdy points on t levels
+
+## pybdy.nemo_bdy_zgrv2.get_bdy_depths_old(bdy_t, bdy_u, bdy_v, DstCoord, settings)
+
+Generate Depth information.
+
+Written by John Kazimierz Farey, Sep 2012
+Port of Matlab code of James Harle
+
+Generates depth points for t, u and v in one loop iteration.
+Initialise with bdy t, u and v grid attributes (Grid.bdy_i) and settings dictionary.
+
+## pybdy.nemo_bdy_zgrv2.get_bdy_sc_depths(SourceCoord, DstCoord, grd)
+
+Depth levels from the nearest neighbour on the source grid.
+
+> ### Parameters
+
+> SourceCoord (object) : Object containing source grid info
+> DstCoord (object) : Object containing destination grid info
+> grd (str) : grid type t, u, v
+
+> ### Returns
+
+> bdy_tz (array) : sc depths on bdy points on t levels
+> bdy_wz (array) : sc depths on bdy points on w levels
+> bdy_e3 (array) : sc level thickness on bdy points on t levels
+
+# pybdy.nemo_coord_gen_pop module
+
+Module that combines Matlab coord gen and pop.
+
+Initialise with netcdf file name and dictionary containing all bdy grids (objects).
+
+## *class* pybdy.nemo_coord_gen_pop.Coord(fname, bdy_ind)
+
+> Bases: `object`
+
+Class for writing boundary coordinate data to a netcdf file.
+
+### *method* \_\_init\_\_(fname, bdy_ind)
+
+Create Nemo bdy indices for t, u, v points.
+
+> ### Parameters
+
+> fname (str) : file name of coords file for output
+> bdy_ind (numpy.array) : indices of bdy points
+
+> ### Returns
+
+> None : object
+
+### *method* add_vars(dim, grd, unt)
+
+Create a variable with attributes.
+
+### *method* build_dict(dim, units)
+
+Set up a grid dictionary.
+
+### *method* closeme()
+
+### *method* create_dims()
+
+Create dims and return a dictionary of them.
+
+### *method* populate(hgr)
+
+Populate the file with indices, lat, lon, and e dimensions.
+
+### *method* set_lenvar(vardic, hgr=None, unt=None)
+
+Set the len var of each array in the var dictionary.
+
+Use by specifying hgr and unt, which pulls data from the loaded grid data.
+Otherwise it is pulled from the class dict.
+
+# pybdy.profiler module
+
+> Created on Wed Sep 12 08:02:46 2012.
+
+The main application script for the NRCT.
+
+@author James Harle
+@author John Kazimierz Farey
+@author Srikanth Nagella
+
+## *class* pybdy.profiler.Grid
+
+> Bases: `object`
+
+A Grid object that stores bdy grid information.
+
+### *method* \_\_init\_\_()
+
+## pybdy.profiler.process_bdy(setup_filepath=0, mask_gui=False)
+
+Handle all the calls to generate open boundary conditions for a given regional domain.
+
+This is the main entry point for processing BDY lateral boundary conditions.
+It is the main script that handles all the calls to generate open
+boundary conditions for a given regional domain. Input options are handled
+in a NEMO style namelist (namelist.bdy). There is an optional GUI allowing
+the user to create a mask that defines the extent of the regional model.
+
+> ### Parameters
+
+> setup_filepath (str) : file path to find namelist.bdy
+> mask_gui (bool): whether use of the GUI is required
+
+> ### Returns
+
+> None : bdy data is written out to NetCDF file
+
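+Typical programmatic usage (equivalent to running the pybdy command line
+tool with -s, and optionally -g for the GUI):
+
+```python
+from pybdy import profiler
+
+# Generate open boundary conditions from a namelist; set mask_gui=True
+# to edit the regional mask interactively first.
+profiler.process_bdy(setup_filepath="namelist.bdy", mask_gui=False)
+```
+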
+## pybdy.profiler.write_tidal_data(setup_var, dst_coord_var, grid, tide_cons, cons)
+
+Write the tidal data to a NetCDF file.
+
+> ### Parameters
+
+> setup_var (list): Description of arg1
+> dst_coord_var (obj) : Description of arg1
+> grid (dict): Description of arg1
+> tide_cons (list): Description of arg1
+> cons (data): cosz, sinz, cosu, sinu, cosv, sinv
+
+> ### Returns
+
+> None : tidal data is written to NetCDF file
+
+# pybdy.pybdy_exe module
+
+Entry for the project.
+
+> @author: Mr. Srikanth Nagella
+
+## pybdy.pybdy_exe.main()
+
+Run main function.
+
+Checks the command line parameters and passes them to the profiler module for processing.
+
+# pybdy.pybdy_ncml_generator module
+
+Created on 2 Jul 2015.
+
+The main application object for hosting the pybdy ncml editor.
+Used for development purposes to display the ncml editor dialog.
+
+> @author: Shirley Crompton, UK Science and Technology Facilities Council
+
+## pybdy.pybdy_ncml_generator.main()
+
+Command line execution method.
+
+Checks the input arguments and passes on to method to open the ncml generator window.
+
+# pybdy.pybdy_settings_editor module
+
+Created on 7 Jan 2015.
+
+> @author: Mr. Srikanth Nagella
+
+## pybdy.pybdy_settings_editor.main()
+
+Command line execution method.
+
+Checks the input arguments and passes on to method to open the settings window.
+
+## pybdy.pybdy_settings_editor.open_settings_dialog(setup)
+
+Start the settings window using the setup settings provided in the input.
+
+On clicking the cancel button it doesn’t shut down the application but carries on with the execution.
+
+## pybdy.pybdy_settings_editor.open_settings_window(fname)
+
+Start a Qt application.
+
+This method gives the user the option to pick a namelist.bdy file to edit.
+Once the user selects a file, a dialog box opens where the parameters can be edited.
+
+# pybdy.version module
+
+## Module contents
+
+a Python based regional NEMO model configuration toolbox.
diff --git a/docs/pybdy.reader.md b/docs/pybdy.reader.md
new file mode 100644
index 00000000..f7c807eb
--- /dev/null
+++ b/docs/pybdy.reader.md
@@ -0,0 +1,185 @@
+# pybdy.reader package
+
+## Submodules
+
+# pybdy.reader.directory module
+
+Abstraction for the data repository.
+
+> @author: Mr. Srikanth Nagella.
+
+## *class* pybdy.reader.directory.GridGroup
+
+> Bases: `object`
+
+### *method* \_\_init\_\_()
+
+### *method* get_meta_data(variable, source_dic)
+
+Return a dictionary with metadata information corresponding to the variable.
+
+## *class* pybdy.reader.directory.Reader(directory, time_adjust)
+
+> Bases: `object`
+
+Reader for all the files in the directory as one single object.
+
+## Examples
+
+reader = Reader(“Folder path”)
+
+> reader[“t”][“votemper”][:, :, :, :]
+
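+The same example as a runnable snippet (the directory path is a
+placeholder; time_adjust shifts the times read from file):
+
+```python
+from pybdy.reader.directory import Reader
+
+reader = Reader("/path/to/source/files", time_adjust=0)
+temperature = reader["t"]["votemper"][:, :, :, :]
+```
+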
+### *method* \_\_init\_\_(directory, time_adjust)
+
+Take in a directory path as input and return the required information to the bdy.
+
+> ### Parameters
+
+> directory : The directory in which to look for the files
+> time_adjust : amount of time by which to adjust the time read from file.
+
+> ### Returns
+
+> None : object
+
+### *method* calculate_time_interval()
+
+Calculate the time interval of each grid.
+
+If all the grids have the same interval, it is set in days and hours.
+Otherwise an error is raised.
+
+### *method* delta_time_interval(time1, time2)
+
+Get the difference between the two times in days and hours.
+
+### *method* get_dir_list(grid)
+
+Scan the directory for input grid-related NetCDF files (i.e., ending with the grid name).
+
+> ### Parameters
+
+> grid (str) : grid name, e.g. ‘t’, ‘v’, ‘u’, ‘i’.
+
+> ### Returns
+
+> dir_list (list) : list of files
+
+### *method* get_source_timedata(grid, t_adjust)
+
+Get the source time data information.
+
+Builds up sourcedata objects of a given grid.
+
+### *attribute* grid_type_list *= ['t', 'u', 'v', 'i']*
+
+## *class* pybdy.reader.directory.Variable(filenames, variable)
+
+> Bases: `object`
+
+### *method* \_\_init\_\_(filenames, variable)
+
+### *method* get_attribute_values(attr_name)
+
+Return the attribute value of the variable.
+
+### *method* get_dimensions()
+
+Return the dimensions of the variables.
+
+### *method* set_time_dimension_index()
+
+Set the time dimension index.
+
+### *attribute* time_counter_const *= 'time_counter'*
+
+# pybdy.reader.factory module
+
+Generic file loader factory.
+
+> @author: Mr. Srikanth Nagella
+
+## pybdy.reader.factory.GetFile(uri)
+
+## pybdy.reader.factory.GetReader(uri, t_adjust, reader_type=None)
+
+## *class* pybdy.reader.factory.NetCDFFile(filename)
+
+> Bases: `object`
+
+### *method* \_\_init\_\_(filename)
+
+### *method* close()
+
+# pybdy.reader.ncml module
+
+NcML reading implementation using pyjnius.
+
+> @author: Mr. Srikanth Nagella.
+
+## *class* pybdy.reader.ncml.GridGroup(filename, dataset)
+
+> Bases: `object`
+
+Class that provides an indirection to the grid type.
+
+Since the ncml file aggregates all the variables, this is just a placeholder.
+
+### *method* \_\_init\_\_(filename, dataset)
+
+Source data that holds the dataset information.
+
+### *method* get_meta_data(variable, source_dic)
+
+Return a dictionary with metadata information corresponding to the variable.
+
+### *attribute* logger
+
+### *method* update_atrributes()
+
+Update the units and calendar information for the grid.
+
+## *class* pybdy.reader.ncml.NcMLFile(filename)
+
+> Bases: `object`
+
+### *method* \_\_init\_\_(filename)
+
+### *method* close()
+
+## *class* pybdy.reader.ncml.Reader(uri, time_adjust)
+
+> Bases: `object`
+
+High-level object for the NCML reader; from here, using the grid type will return the grid data.
+
+## Examples
+
+reader = Reader(“NCML Filename”)
+
+> reader[“t”][“votemper”][:, :, :, :]
+
+### *method* \_\_init\_\_(uri, time_adjust)
+
+### *method* close()
+
+Not yet implemented.
+
+### *attribute* grid_type_list *= ['t', 'u', 'v', 'i']*
+
+### *attribute* time_counter *= 'time_counter'*
+
+## *class* pybdy.reader.ncml.Variable(dataset, variable)
+
+> Bases: `object`
+
+### *method* \_\_init\_\_(dataset, variable)
+
+### *method* get_attribute_value(attr_name)
+
+Return the attribute value of the variable.
+
+## pybdy.reader.ncml.init_jnius()
+
+## Module contents
diff --git a/docs/pybdy.tide.md b/docs/pybdy.tide.md
new file mode 100644
index 00000000..e113fcf0
--- /dev/null
+++ b/docs/pybdy.tide.md
@@ -0,0 +1,157 @@
+# pybdy.tide package
+
+## Submodules
+
+# pybdy.tide.fes2014_extract_HC module
+
+Fes extraction of harmonic constituents.
+
+Extract the tidal harmonic constants out of a tidal model for given locations:
+[amp,gph] = fes2014_extract_HC(Model,lat,lon,type,Cid).
+
+Modified from tpxo_extract_HC.py
+
+3 Nov 2017
+jelt
+
+## *class* pybdy.tide.fes2014_extract_HC.FesExtract(settings, lat, lon, grid_type)
+
+> Bases: `object`
+
+FES model extract of harmonic constituents.
+
+The source FES data are stored as one file per constituent.
+Note the FES data are structured with lat and lon reversed relative to TPXO,
+i.e. FES(lat,lon),
+c.f. TPXO7(con,lon,lat)
+and TPXO9(lon,lat).
+
+Note the FES heights are in cm (they need to be converted to metres).
+The momentum vector quantities are depth-integrated TRANSPORTS (m^2/s).
+In TPXO7 both transports (m^2/s) and velocities (cm/s) are given.
+In TPXO9 only transports (cm^2/s) are given.
+Here we use the transport fluxes.
+
+### *method* \_\_init\_\_(settings, lat, lon, grid_type)
+
+Initialise the Extract of tide information from the netcdf Tidal files.
+
+### *method* interpolate_constituents(amp_fes, pha_fes, lon_fes, lat_fes, lon, lat)
+
+Interpolate the tidal constituents along the given lat/lon coordinates.
+
+## pybdy.tide.fes2014_extract_HC.bilinear_interpolation(lon, lat, data, lon_new, lat_new)
+
+Perform a bilinear interpolation of the grid where the data values are NaNs.
+
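+A sketch of the general operation with SciPy, ignoring the NaN handling
+the real routine performs (note the FES-style (lat, lon) ordering):
+
+```python
+import numpy as np
+from scipy.interpolate import RegularGridInterpolator
+
+lon = np.linspace(-10.0, 10.0, 21)
+lat = np.linspace(45.0, 55.0, 11)
+data = np.random.rand(lat.size, lon.size)  # FES-style (lat, lon) array
+
+interp = RegularGridInterpolator((lat, lon), data, method="linear")
+lat_new = np.array([46.3, 50.1])
+lon_new = np.array([-3.2, 4.7])
+values = interp(np.column_stack([lat_new, lon_new]))
+```
+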
+## pybdy.tide.fes2014_extract_HC.interpolate_data(lon, lat, data, mask, lonlat)
+
+Interpolate data on a regular grid for given lon/lat coordinates.
+
+# pybdy.tide.nemo_bdy_tide module
+
+## *class* pybdy.tide.nemo_bdy_tide.Extract(setup, DstCoord, Grid)
+
+> Bases: `object`
+
+### *method* \_\_init\_\_(setup, DstCoord, Grid)
+
+### *method* extract_con(con)
+
+# pybdy.tide.nemo_bdy_tide3 module
+
+Module to extract constituents for the input grid mapped onto output grid.
+
+> @author: Mr. Srikanth Nagella
+
+## pybdy.tide.nemo_bdy_tide3.constituents_index(constituents, inputcons)
+
+Convert the input constituents to indices in the tidal constituents list.
+
+> ### Parameters
+
+> constituents: The list of constituents available from the source data
+> : e.g. TPXO: [‘m2’, ‘s2’, ‘n2’, ‘k2’, ‘k1’, ‘o1’, ‘p1’, ‘q1’, ‘mf’, ‘mm’, ‘m4’, ‘ms4’, ‘mn4’]
+
+> inputcons: The dictionary of constituents from the namelist with their numbers
+> : e.g. {‘1’: “‘M2’”, ‘3’: “‘K2’”, ‘2’: “‘S2’”, ‘4’: “‘M4’”}
+
+> ### Returns
+
+> retindx: The indices (relative to the source data list) of the dictionary items from the namelist
+> : e.g. [ 0. 3. 1. 10.]
+
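+A worked example using the values above (quoting of the namelist strings
+follows the docstring example; exact formatting may vary):
+
+```python
+from pybdy.tide.nemo_bdy_tide3 import constituents_index
+
+constituents = ['m2', 's2', 'n2', 'k2', 'k1', 'o1', 'p1',
+                'q1', 'mf', 'mm', 'm4', 'ms4', 'mn4']
+inputcons = {'1': "'M2'", '3': "'K2'", '2': "'S2'", '4': "'M4'"}
+retindx = constituents_index(constituents, inputcons)
+# Expected, as in the docstring above: [0., 3., 1., 10.]
+```
+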
+## pybdy.tide.nemo_bdy_tide3.nemo_bdy_tide_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp)
+
+Global Tidal model interpolation onto target grid, including grid rotation.
+
+> ### Parameters
+
+> setup : settings
+> DstCoord : destination coordinate object
+> Grid_T : t grid bdy_i, grid_type, bdy_r
+> Grid_U : u grid bdy_i, grid_type, bdy_r
+> Grid_V : v grid bdy_i, grid_type, bdy_r
+> comp : dictionary of harmonics read from namelist {‘1’:”M2” , ‘2’:””}
+
+> ### Returns
+
+> cosz, sinz, cosu, sinu, cosv, sinv : [number of constituents, number of bdy points]
+
+# pybdy.tide.nemo_bdy_tide_ncgen module
+
+Create a Tide netcdf file ready for population.
+
+> @author: Mr. Srikanth Nagella
+
+## pybdy.tide.nemo_bdy_tide_ncgen.CreateBDYTideNetcdfFile(filename, xb_len, x_len, y_len, h, fv, grd)
+
+# pybdy.tide.tpxo_extract_HC module
+
+Extract the tidal harmonic constants out of a tidal model for given locations.
+
+[amp,Gph] = tpxo_extract_HC(Model,lat,lon,type,Cid).
+
+> original author: Mr. Srikanth Nagella
+
+TPXO data has a grid file and then a data file for harmonic heights and harmonic currents.
+In TPXO7.2 the resolution was sufficiently low that all the harmonics could be bundled together.
+In TPXO9v5 the resolution increased such that separate files are issued for each constituent.
+
+Files are processed in real and imaginary parts as they are easier to interpolate.
+
+## *class* pybdy.tide.tpxo_extract_HC.TpxoExtract(settings, lat, lon, grid_type)
+
+> Bases: `object`
+
+TPXO model extract_hc.c implementation in python.
+
+### *method* \_\_init\_\_(settings, lat, lon, grid_type)
+
+Initialise the Extract of tide information from the netcdf Tidal files.
+
+### *method* generate_landmask_from_bathymetry(bathy_name)
+
+Create a boolean mask xr.DataArray from bathymetry.
+
+TPXO7.2 carries a binary variable called mask and a bathymetry variable.
+TPXO9v5 only carries the bathymetry variable.
+
+> return: mask dataarray.
+
+> Usage:
+> : self.grid[mask_name] = generate_landmask(bathy_name)
+
+### *method* interpolate_constituents(nc_dataset, real_var_name, img_var_name, lon_var_name, lat_var_name, lon, lat, height_data=None, maskname=None)
+
+Interpolate the tidal constituents along the given lat/lon coordinates.
+
+## pybdy.tide.tpxo_extract_HC.bilinear_interpolation(lon, lat, data, lon_new, lat_new)
+
+Do a bilinear interpolation of the grid where the data values are NaNs.
+
+## pybdy.tide.tpxo_extract_HC.interpolate_data(lon, lat, data, mask, lonlat)
+
+Interpolate data on a regular grid for given lon/lat coordinates.
+
+## Module contents
diff --git a/docs/pybdy.utils.md b/docs/pybdy.utils.md
new file mode 100644
index 00000000..3effa393
--- /dev/null
+++ b/docs/pybdy.utils.md
@@ -0,0 +1,124 @@
+# pybdy.utils package
+
+## Submodules
+
+# pybdy.utils.Constants module
+
+File with all the constants that will be used.
+
+> @author: Mr. Srikanth Nagella
+
+# pybdy.utils.e3_to_depth module
+
+Function e3_to_depth.
+
+> Purpose : compute t- & w-depths of model levels from e3t & e3w scale factors
+> Method : The t- & w-depth are given by the summation of e3w & e3t, resp.
+> Action : pe3t, pe3w : scale factor of t- and w-point (m)
+> Usage: [gdept, gdepw] = e3_to_depth(e3t, e3w, nz).
+
+## pybdy.utils.e3_to_depth.e3_to_depth(pe3t, pe3w, jpk)
+
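+A minimal NumPy sketch of the summation described above, mirroring the
+standard NEMO construction (illustrative, not the exact source; assumes
+the vertical dimension comes first):
+
+```python
+import numpy as np
+
+def e3_to_depth_sketch(pe3t, pe3w, jpk):
+    gdept = np.zeros_like(pe3t)
+    gdepw = np.zeros_like(pe3w)
+    gdepw[0] = 0.0            # w-point at the sea surface
+    gdept[0] = 0.5 * pe3w[0]  # first t-point sits half an e3w down
+    for k in range(1, jpk):
+        gdepw[k] = gdepw[k - 1] + pe3t[k - 1]  # summing e3t gives w-depths
+        gdept[k] = gdept[k - 1] + pe3w[k]      # summing e3w gives t-depths
+    return gdept, gdepw
+```
+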
+# pybdy.utils.gcoms_break_depth module
+
+Rewriting the break depth implementation from the Matlab version.
+
+> @author: Mr. Srikanth Nagella
+
+## pybdy.utils.gcoms_break_depth.gcoms_boundary_masks(bathy, ov, lv)
+
+\_Summary.
+
+> ### Parameters
+
+> bathy (numpy array) : the input bathymetry data
+> ov (numpy array) : latitude array
+> lv (numpy array) : longitude array
+
+> ### Returns
+
+> ob, lb : numpy arrays
+
+## pybdy.utils.gcoms_break_depth.gcoms_break_depth(bathy)
+
+Create a mask for the break depth using histograms.
+
+## pybdy.utils.gcoms_break_depth.polcoms_select_domain(bathy, lat, lon, roi, dr)
+
+Calculate the shelf break.
+
+> ### Parameters
+
+> bathy (numpy array) : the input bathymetry data
+> lat (numpy array) : latitude array
+> lon (numpy array) : longitude array
+> roi (python array) : region of interest array [4]
+> dr (float) : shelf break distance
+
+> ### Returns
+
+> depth_shelf, h_max : numpy arrays
+
+# pybdy.utils.nemo_bdy_lib module
+
+Library of some functions used by multiple classes.
+
+Written by John Kazimierz Farey, Sep 2012.
+
+## pybdy.utils.nemo_bdy_lib.bdy_sections(nbidta, nbjdta, nbrdta, rw)
+
+Extract individual bdy sections.
+
+## pybdy.utils.nemo_bdy_lib.bdy_transport()
+
+Calculate transport across individual bdy sections.
+
+## pybdy.utils.nemo_bdy_lib.dist(self, x, y)
+
+Return the distance between two points.
+
+## pybdy.utils.nemo_bdy_lib.dist_point_to_segment(p, s0, s1)
+
+Get the distance of a point to a segment.
+
+*p*, *s0*, *s1* are *xy* sequences.
+This algorithm is from
+
+> [http://geomalgorithms.com/a02-\_lines.html](http://geomalgorithms.com/a02-_lines.html).
+
+## pybdy.utils.nemo_bdy_lib.get_output_filename(setup_var, year, month, var_type)
+
+Return an output filename constructed for a given var_type, year and month.
+
+## pybdy.utils.nemo_bdy_lib.get_output_tidal_filename(setup_var, const_name, grid_type)
+
+Return an output filename constructed for a given tidal constituent and grid type.
+
+## pybdy.utils.nemo_bdy_lib.psi_field(U, V)
+
+## pybdy.utils.nemo_bdy_lib.rot_rep(pxin, pyin, dummy, cd_todo, gcos, gsin)
+
+Rotate function.
+
+## pybdy.utils.nemo_bdy_lib.sub2ind(shap, subx, suby)
+
+Subscript to index of a 1d array.
+
+## pybdy.utils.nemo_bdy_lib.velocity_field(psi)
+
+## Module contents
diff --git a/docs/source/_static/add-variable.jpg b/docs/source/_static/add-variable.jpg
deleted file mode 100644
index 8efe0d74..00000000
Binary files a/docs/source/_static/add-variable.jpg and /dev/null differ
diff --git a/docs/source/_static/eg1.png b/docs/source/_static/eg1.png
deleted file mode 100644
index 37a6e29b..00000000
Binary files a/docs/source/_static/eg1.png and /dev/null differ
diff --git a/docs/source/_static/eg2.png b/docs/source/_static/eg2.png
deleted file mode 100644
index 3db45761..00000000
Binary files a/docs/source/_static/eg2.png and /dev/null differ
diff --git a/docs/source/_static/generator-gui.jpg b/docs/source/_static/generator-gui.jpg
deleted file mode 100644
index 9d77bbab..00000000
Binary files a/docs/source/_static/generator-gui.jpg and /dev/null differ
diff --git a/docs/source/_templates/.gitkeep b/docs/source/_templates/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/docs/source/conf.py b/docs/source/conf.py
deleted file mode 100644
index aa4bea63..00000000
--- a/docs/source/conf.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Configuration file for the Sphinx documentation builder.
-#
-# This file only contains a selection of the most common options. For a full
-# list see the documentation:
-# https://www.sphinx-doc.org/en/master/usage/configuration.html
-
-# -- Import and path setup ---------------------------------------------------
-
-import os
-import sys
-
-import pybdy
-
-sys.path.insert(0, os.path.abspath("../"))
-
-# -- Project information -----------------------------------------------------
-
-project = "pyBDY"
-copyright = "2023, NOC"
-author = "NOC"
-version = pybdy.__version__
-release = pybdy.__version__
-
-# -- General configuration ---------------------------------------------------
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
- "autoapi.extension",
- "myst_parser",
- "sphinx.ext.autodoc",
- "sphinx.ext.napoleon",
-]
-
-# autodoc configuration
-autodoc_typehints = "none"
-
-# autoapi configuration
-autoapi_dirs = ["../../src/pybdy"]
-autoapi_ignore = ["*/version.py"]
-autoapi_options = [
- "members",
- "inherited-members",
- "undoc-members",
- "show-inheritance",
- "show-module-summary",
- "imported-members",
-]
-autoapi_root = "_api"
-
-# napoleon configuration
-napoleon_google_docstring = False
-napoleon_numpy_docstring = True
-napoleon_preprocess_types = True
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-# This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# -- Options for HTML output -------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-#
-html_theme = "sphinx_rtd_theme"
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
deleted file mode 100644
index 49b0cbae..00000000
--- a/docs/source/examples.rst
+++ /dev/null
@@ -1,354 +0,0 @@
-Examples
-========
-Here we provide two worked examples using pyBDY. The first is a setup of the Northwest European Shelf using
-a remote dataset. The second is an end-to-end setup of a small regional model in the tropics.
-
-Example 1: Northwest European Shelf
-===================================
-
-
-.. figure:: _static/eg1.png
- :align: center
-
- Northwest European Shelf Bathymetry
-
-
-This example has been tested on the ARCHER HPC facillity *(22 Feb 2017)*.
-
-First, create a working directory into which the code can
-run. All the data required for this example are held on a
-THREDDS server so no addtional data are required.
-
-.. note:: make sure cray-netcdf-hdf5parallel cray-hdf5-parallel are loaded.
- This example has been consructed under PrgEnv-intel. e.g.
-
-::
-
- module swap PrgEnv-cray PrgEnv-intel
- module load cray-netcdf-hdf5parallel
- module load cray-hdf5-parallel
-
-.. note:: Be careful to avoid symbolic links in NEMO control files.
-
-::
-
- cd $WDIR
- mkdir OUTPUT
-
-Now we're ready to generate the boundary conditions using pyBDY.
-If this is not installed follow the `installation guide` or a quick
-setup could be as follows:
-
-::
-
- cd ~
- module load anaconda
- conda create --name pynemo_env scipy=0.16.0 numpy matplotlib=1.5.1 basemap netcdf4 libgfortran=1.0.0
- source activate pynemo_env
- conda install -c conda-forge seawater=3.3.4
- conda install -c https://conda.anaconda.org/srikanthnagella thredds_crawler
- conda install -c https://conda.anaconda.org/srikanthnagella pyjnius
- export LD_LIBRARY_PATH=/opt/java/jdk1.7.0_45/jre/lib/amd64/server:$LD_LIBRARY_PATH
- svn checkout https://ccpforge.cse.rl.ac.uk/svn/pynemo
- cd pynemo/trunk/Python
- python setup.py build
- export PYTHONPATH=~/.conda/envs/pynemo/lib/python2.7/site-packages/:$PYTHONPATH
- python setup.py install --prefix ~/.conda/envs/pynemo
- cp data/namelist.bdy $WDIR
- cd $WDIR
-
-Next we need to modify the namelist.bdy file to point it to the correct
-data sources. First we need to create an ncml file to gather input data
-and map variable names. First we update *sn_src_dir*, *sn_dst_dir* and
-*cn_mask_file* to reflect the working path (e.g. sn_src_dir = '$WDIR/test.ncml',
-sn_dst_dir = '$WDIR/OUTPUT' and cn_mask_file = '$WDIR/mask.nc').
-Explicitly write out $WDIR. Next we need to generate test.ncml.
-
-.. note:: pynemo may have to be run on either espp1 or espp2 (e.g. ssh -Y espp1)
- as the JVM doesn't have sufficient memory on the login nodes.
-
-::
-
- ssh -Y espp1
- module load anaconda
- source activate pynemo_env
- cd $WDIR
- pynemo_ncml_generator
-
-For each of the tracer and dynamics variables enter the following URL as
-the source directory:
-
-http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data
-
-Add a regular expression for each (Temperature, Salinity and Sea Surface
-Height each use: .\*T\\.nc$ and the velocities use .\*V\\.nc$ and .\*V\\.nc$)
-After each entry click the Add button. Finally fill in the output file
-including directory path (this should match *sn_src_dir*). Once this is complete
-click on the generate button and an ncml file should be written to $WDIR.
-
-Then using pynemo we define the area we want to model and generate some
-boundary conditions:
-
-.. note:: I've had to add the conda env path to the $PYTHONPATH as python does
- seem to be able to pick up pyjnius!?
-
-::
-
- export LD_LIBRARY_PATH=/opt/java/jdk1.7.0_45/jre/lib/amd64/server:$LD_LIBRARY_PATH
- export PYTHONPATH=~/.conda/envs/pynemo_env/lib/python2.7/site-packages:$PYTHONPATH
- pynemo -g -s namelist.bdy
-
-Once the area of interest is selected and the close button is clicked,
-open boundary data should be generated in $WDIR/OUTPUT.
-
-
-Example 2: Lighthouse Reef
-==========================
-
-.. figure:: _static/eg2.png
- :align: center
-
- Regional Mask / SSH after 1 day / SST after 1 day
-
-
-This example has been tested on the ARCHER HPC facillity.
-
-First, create a working directory into which the NEMO
-source code can be checked out. Create an inputs directory
-to unpack the forcing tar ball.
-
-.. note:: make sure cray-netcdf-hdf5parallel cray-hdf5-parallel are loaded.
- This example has been consructed under PrgEnv-intel.
-
-::
-
- cd $WDIR
- mkdir INPUTS
- cd INPUTS
- wget ftp.nerc-liv.ac.uk:/pub/general/jdha/inputs.tar.gz
- tar xvfz inputs.tar.gz
- rm inputs.tar.gz
- cd ../
- svn co http://forge.ipsl.jussieu.fr/nemo/svn/branches/2014/dev_r4621_NOC4_BDY_VERT_INTERP@5709
- svn co http://forge.ipsl.jussieu.fr/ioserver/svn/XIOS/branchs/xios-1.0@629
- cd xios-1.0
- cp $WDIR/INPUTS/arch-XC30_ARCHER.* ./arch
- ./make_xios --full --prod --arch XC30_ARCHER --netcdf_lib netcdf4_par
-
-Next we setup our experiment directory and drop an updated
-dtatsd.F90 into MY_SRC to allow the vertical interpolation
-of initial conditions on to the new verictal coordinates.
-We also apply several patches for bugs in the code.
-
-.. note:: when executing ./makenemo for the first time only choose OPA_SRC.
- For some reason even though LIM_2 is not chosen key_lim2 is
- in the cpp keys. This means the first call to ./makenemo will fail.
- Just vi LH_REEF/cpp_LH_REEF.fcm and remove key_lim2 and re-issue
- the make command.
-
-::
-
- export CDIR=$WDIR/dev_r4621_NOC4_BDY_VERT_INTERP/NEMOGCM/CONFIG
- export TDIR=$WDIR/dev_r4621_NOC4_BDY_VERT_INTERP/NEMOGCM/TOOLS
- cd $CDIR/../NEMO/OPA_SRC/SBC
- patch -b < $WDIR/INPUTS/fldread.patch
- cd ../DOM
- patch -b < $WDIR/INPUTS/dommsk.patch
- cd ../BDY
- patch -b < $WDIR/INPUTS/bdyini.patch
- cd $CDIR
- rm $CDIR/../NEMO/OPA_SRC/TRD/trdmod.F90
- cp $WDIR/INPUTS/arch-* ../ARCH
- ./makenemo -n LH_REEF -m XC_ARCHER_INTEL -j 10
- cp $WDIR/INPUTS/cpp_LH_REEF.fcm ./LH_REEF
- cp $WDIR/INPUTS/dtatsd.F90 LH_REEF/MY_SRC/
-
-To generate bathymetry, initial conditions and grid information
-we first need to compile some of the NEMO TOOLS (after a small
-bugfix - and to allow direct passing of arguments). For some
-reason GRIDGEN doesn't like INTEL:
-
-::
-
- cd $WDIR/dev_r4621_NOC4_BDY_VERT_INTERP/NEMOGCM/TOOLS/WEIGHTS/src
- patch -b < $WDIR/INPUTS/scripinterp_mod.patch
- patch -b < $WDIR/INPUTS/scripinterp.patch
- patch -b < $WDIR/INPUTS/scrip.patch
- patch -b < $WDIR/INPUTS/scripshape.patch
- patch -b < $WDIR/INPUTS/scripgrid.patch
- cd ../../
- ./maketools -n WEIGHTS -m XC_ARCHER_INTEL
- ./maketools -n REBUILD_NEMO -m XC_ARCHER_INTEL
- module unload cray-netcdf-hdf5parallel cray-hdf5-parallel
- module swap PrgEnv-intel PrgEnv-cray
- module load cray-netcdf cray-hdf5
- ./maketools -n GRIDGEN -m XC_ARCHER
- module swap PrgEnv-cray PrgEnv-intel
- export TDIR=$WDIR/dev_r4621_NOC4_BDY_VERT_INTERP/NEMOGCM/TOOLS
-
-.. note:: my standard ARCHER ENV is intel with parallel netcdf you may need to edit accordingly
-
-Back in $WDIR/INPUTS, create a new coordinates file from the
-existing global 1/12 mesh and refine to 1/84 degree resolution:
-
-::
-
- cd $TDIR/GRIDGEN
- cp $WDIR/INPUTS/namelist_R12 ./
- ln -s namelist_R12 namelist.input
- ./create_coordinates.exe
- cp 1_coordinates_ORCA_R12.nc $WDIR/INPUTS/coordinates.nc
-
-To create the bathymetry we use the gebco dataset. On ARCHER I
-had to use a non-default nco module for netcdf operations to work.
-I also had to cut down the gebco data as the SCRIP routines failed
-for some unknown reason.
-
-::
-
- cd $WDIR/INPUTS
- module load nco/4.5.0
- ncap2 -s 'where(topo > 0) topo=0' gebco_1_cutdown.nc tmp.nc
- ncflint --fix_rec_crd -w -1.0,0.0 tmp.nc tmp.nc gebco_in.nc
- rm tmp.nc
- module unload nco cray-netcdf cray-hdf5
- module load cray-netcdf-hdf5parallel cray-hdf5-parallel
- $TDIR/WEIGHTS/scripgrid.exe namelist_reshape_bilin_gebco
- $TDIR/WEIGHTS/scrip.exe namelist_reshape_bilin_gebco
- $TDIR/WEIGHTS/scripinterp.exe namelist_reshape_bilin_gebco
-
-We perform a similar operation to create the initial conditions:
-
-.. note:: I've put a sosie pre-step in here to flood fill the land.
- I tried using sosie for 3D intepolation, but not convinced.
-
-::
-
- cd ~
- mkdir local
- svn co svn://svn.code.sf.net/p/sosie/code/trunk sosie
- cd sosie
- cp $WDIR/INPUTS/make.macro ./
- make
- make install
- export PATH=~/local/bin:$PATH
- cd $WDIR/INPUTS
- sosie.x -f initcd_votemper.namelist
- sosie.x -f initcd_vosaline.namelist
- $TDIR/WEIGHTS/scripgrid.exe namelist_reshape_bilin_initcd_votemper
- $TDIR/WEIGHTS/scrip.exe namelist_reshape_bilin_initcd_votemper
- $TDIR/WEIGHTS/scripinterp.exe namelist_reshape_bilin_initcd_votemper
- $TDIR/WEIGHTS/scripinterp.exe namelist_reshape_bilin_initcd_vosaline
-
-Finally we setup weights files for the atmospheric forcing:
-
-::
-
- $TDIR/WEIGHTS/scripgrid.exe namelist_reshape_bilin_atmos
- $TDIR/WEIGHTS/scrip.exe namelist_reshape_bilin_atmos
- $TDIR/WEIGHTS/scripshape.exe namelist_reshape_bilin_atmos
- $TDIR/WEIGHTS/scrip.exe namelist_reshape_bicubic_atmos
- $TDIR/WEIGHTS/scripshape.exe namelist_reshape_bicubic_atmos
-
-
-Next step is to create the mesh and mask files that will be used
-in the generation of the open boundary conditions:
-
-::
-
- cd $CDIR
- cp $WDIR/INPUTS/cpp_LH_REEF.fcm LH_REEF/
- ln -s $WDIR/INPUTS/bathy_meter.nc $CDIR/LH_REEF/EXP00/bathy_meter.nc
- ln -s $WDIR/INPUTS/coordinates.nc $CDIR/LH_REEF/EXP00/coordinates.nc
- cp $WDIR/INPUTS/runscript $CDIR/LH_REEF/EXP00
- cp $WDIR/INPUTS/namelist_cfg $CDIR/LH_REEF/EXP00/namelist_cfg
- cp $WDIR/INPUTS/namelist_ref $CDIR/LH_REEF/EXP00/namelist_ref
- ./makenemo clean
- ./makenemo -n LH_REEF -m XC_ARCHER_INTEL -j 10
- cd LH_REEF/EXP00
- ln -s $WDIR/xios-1.0/bin/xios_server.exe xios_server.exe
- qsub -q short runscript
-
-
-If that works, we then need to rebuild the mesh and mask files in
-to single files for the next step:
-
-::
-
- $TDIR/REBUILD_NEMO/rebuild_nemo -t 24 mesh_zgr 96
- $TDIR/REBUILD_NEMO/rebuild_nemo -t 24 mesh_hgr 96
- $TDIR/REBUILD_NEMO/rebuild_nemo -t 24 mask 96
- mv mesh_zgr.nc mesh_hgr.nc mask.nc $WDIR/INPUTS
- rm mesh_* mask_* LH_REEF_0000*
- cd $WDIR/INPUTS
-
-Now we're ready to generate the boundary conditions using pyBDY.
-If this is not installed follow the `installation guide` or a quick
-setup could be as follows:
-
-::
-
- cd ~
- module load anaconda
- conda create --name pynemo_env scipy=0.16.0 numpy matplotlib=1.5.1 basemap netcdf4 libgfortran=1.0.0
- source activate pynemo_env
- conda install -c conda-forge seawater=3.3.4
- conda install -c https://conda.anaconda.org/srikanthnagella thredds_crawler
- conda install -c https://conda.anaconda.org/srikanthnagella pyjnius
- export LD_LIBRARY_PATH=/opt/java/jdk1.7.0_45/jre/lib/amd64/server:$LD_LIBRARY_PATH
- svn checkout https://ccpforge.cse.rl.ac.uk/svn/pynemo
- cd pynemo/trunk/Python
- python setup.py build
- export PYTHONPATH=~/.conda/envs/pynemo/lib/python2.7/site-packages/:$PYTHONPATH
- python setup.py install --prefix ~/.conda/envs/pynemo
- cd $WDIR/INPUTS
-
-Start up pynemo and generate boundary conditions. First we need to
-create a few ncml files to gather input data and map variable names.
-Then using pynemo we define the area we want to model:
-
-.. note:: pynemo may have to be run on either espp1 or espp2 (e.g. ssh -Y espp1)
- as the JVM doesn't have sufficient memory on the login nodes.
-
-::
-
- ssh -Y espp1
- module load anaconda
- source activate pynemo_env
- cd $WDIR/INPUTS
- pynemo_ncml_generator
-
-.. note:: The ncml files already exist in the INPUTS directory. There is no need
- generate them. It's a little tricky at the momment as the ncml generator
- doesn't have all the functionality required for this example. Next step
- is to fire up pynemo. You can change the mask or accept the default by just
- hitting the close button (that really should say 'build' or 'go' or such like).
- Also I've had to add the conda env path to the $PYTHONPATH as python does
- seem to be able to pick up pyjnius!?
-
-::
-
- export LD_LIBRARY_PATH=/opt/java/jdk1.7.0_45/jre/lib/amd64/server:$LD_LIBRARY_PATH
- export PYTHONPATH=~/.conda/envs/pynemo_env/lib/python2.7/site-packages:$PYTHONPATH
- pynemo -g -s namelist.bdy
-
-Let's have a go at running the model after exiting espp1 (after a few variable
-renamings, due to inconsistencies to be ironed out):
-
-::
-
- exit
- cd $WDIR/INPUTS
- module unload cray-netcdf-hdf5parallel cray-hdf5-parallel
- module load nco/4.5.0
- ncrename -v deptht,gdept LH_REEF_bdyT_y1980m01.nc
- ncrename -v depthu,gdepu LH_REEF_bdyU_y1980m01.nc
- ncrename -v depthv,gdepv LH_REEF_bdyV_y1980m01.nc
- module unload nco
- module load cray-netcdf-hdf5parallel cray-hdf5-parallel
- cd $CDIR/LH_REEF/EXP00
- ln -s $WDIR/INPUTS/coordinates.bdy.nc $CDIR/LH_REEF/EXP00/coordinates.bdy.nc
- sed -e 's/nn_msh = 3/nn_msh = 0/' namelist_cfg > tmp
- sed -e 's/nn_itend = 1/nn_itend = 1440 /' tmp > namelist_cfg
- cp $WDIR/INPUTS/*.xml ./
- qsub -q short runscript
diff --git a/docs/source/index.rst b/docs/source/index.rst
deleted file mode 100644
index ff9db74c..00000000
--- a/docs/source/index.rst
+++ /dev/null
@@ -1,23 +0,0 @@
-NRCT User Guide
-=================
-
-Contents:
-
-.. toctree::
- :maxdepth: 3
-
- intro
- installation
- usage
- ncml_generator_usage
- examples
- troubleshooting
-
-
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
diff --git a/docs/source/installation.rst b/docs/source/installation.rst
deleted file mode 100644
index dd8b6cee..00000000
--- a/docs/source/installation.rst
+++ /dev/null
@@ -1,52 +0,0 @@
-Installation
-============
-This page provides a guide to installing pyBDY.
-
-Dependencies
-^^^^^^^^^^^^
-
-1. Python 2.7 (Not tested with 3.x)
-2. scipy
-3. netCDF4-python
-4. numpy
-5. matplotlib
-6. basemap
-7. thredds_crawler
-8. seawater
-9. pyjnius (optional)
-
-Anaconda
-^^^^^^^^
-
-Using conda: pyBDY supports Win64, OSX and Linux. for other operating systems please build from source.
-
-.. note:: It is recommended to create a seperate virtual environment for pyBDY.
- Please follow the instructions on doing this at http://www.continuum.io/blog/conda
-
-::
-
- conda install -c https://conda.anaconda.org/srikanthnagella pybdy
-
-This will install pyBDY and its dependencies. This build is generally outdated as development and
-bug fixes to the source are a regular occurrence. It may be better to install from source until a beta
-release is available.
-
-From Source
-^^^^^^^^^^^
-
-Installing pyBDY using other flavours of software or from source. Install all the dependencies and
-download the source code from svn and install.
-
-::
-
- svn checkout http://ccpforge.cse.rl.ac.uk/svn/pynemo/trunk/Python/
- python setup.py install
-
-.. note:: If building from source in the Anaconda environment all dependencies can
- be installed using conda apart from thredds_crawler and pyjnius which can
- be installed using the following Anaconda channel:
-
-::
-
- conda install -c https://conda.anaconda.org/srikanthnagella thredds_crawler
- conda install -c https://conda.anaconda.org/srikanthnagella pyjnius
diff --git a/docs/source/intro.rst b/docs/source/intro.rst
deleted file mode 100644
index 61b84cce..00000000
--- a/docs/source/intro.rst
+++ /dev/null
@@ -1,21 +0,0 @@
-Introduction
-============
-
-
-The NRCT is a tool to set up the lateral boundary conditions for a regional `NEMO `_
-model configuration. The tool is written in Python, largely within the
-`Anaconda `_ environment to aid
-wider distribution and to facilitate development. In their current form these
-tools are by no means generic and polished, but it is hoped will form a foundation
-from which something more formal can be developed. The following sections provide a quick-start guide with
-worked examples to help the user get up and running with the tool.
-
-The tool essentially uses geographical and depth information from the source
-data (e.g. a global ocean simulation) and destination simulation (i.e. the
-proposed regional NEMO model configuration) to determine which source points are required
-for data extraction. This is done using a kdtree approximate nearest neighbour
-algorithm. The idea behind this targetted method is that it provides a generic
-method of interpolation for any flavour of ocean model in order to set up a
-regional NEMO model configuration. At present (alpha release) the tools do not
-contain many options, but those that exist are accessed either through a NEMO style
-namelist or a convient GUI.
diff --git a/docs/source/ncml_generator_usage.rst b/docs/source/ncml_generator_usage.rst
deleted file mode 100644
index 259c318a..00000000
--- a/docs/source/ncml_generator_usage.rst
+++ /dev/null
@@ -1,96 +0,0 @@
-pyBDY NcML Generator Usage
-===========================
-
-This GUI tool facilitates the creation of a virtual dataset for input into pyBDY. The virtual dataset is defined using NetCDF Markup Language (`NcML `_ ).
-
-Using NcML, it is possible to:
-
-1. modify metadata
-2. modify and restructure variables
-3. combine or aggregate data from multiple datasets. The datasets may reside in the local file system or in a remote OPeNDAP (http://www.opendap.org/) server.
-
-
-Generator GUI
-^^^^^^^^^^^^^
-
-.. _NcML_gui:
-.. figure:: _static/generator-gui.jpg
- :align: center
-
- Overview of the NcML Generator GUI.
-
-
-Users need to follow three distinct steps when using the GUI to generate the virtual dataset:
-
-1. Define a target output NcML file
-2. Define the individal variable
-3. Generate the NcML file
-
-Define a Target Output File
----------------------------
-
-User should provide the path and name of the target NcML file. The convention is to use *.ncml* as the file suffix. The target file can be specified manually using the input text box or visually using the *Select file* button. Clicking the button will bring up a file dialogue.
-
-
-Define the Individual Data Variable
------------------------------------
-
-The nemo data variables are grouped into the following types :
-
-1. Tracer (temperature, salinity)
-2. Dynamics (zonal velocity, meridian velocity, sea surface height)
-3. Ice (ice thickness, leads fraction, snow thickness)
-4. Ecosystem (reserved for future use)
-5. Grid (reserved for future use)
-
-Users can access the required variable by selecting the tab widget and the variable from the *Variable* dropdown list.
-
-.. _NcML_gui_completed:
-.. figure:: _static/add-variable.jpg
- :align: center
-
- Example definition of the *Ice thickness variable*.
-
-For each variable, users must provide information for:
-
-* Source directory - the location of the folder containing the input datasets. User can provide an absolute path to a local file folder or an OPeNDAP endpoint, e.g. http://esurgeod.noc.soton.ac.uk:8080/thredds/dodsC/PyNEMO/data/
-* Existing variable name - name used in the source datasets
-
-Users may further filter the source datasets using:
-
-* Include subdirs - check the box to include contents in the sub directories under the specified *Source directory*
-* Regular expression - provides a search pattern for filtering the files. See the **Regex** section below for more information.
-
-After completing the variable form, users should click the *Add* button to store the input value. Alternatively, users can use the *Reset* button to reset the input to the previously saved values. If there are no existing values, the variable tab will be reset to the default state.
-
-Generate the NcML file
-----------------------
-
-After adding all the variables, users can generate the NcML file by clicking the *Generate* button. If the operation is successful, a pop-up confirmation dialogue will appear. The generated NcML file can then be used in the bdy file to set up the NEMO simulation.
-
-
-Regular Expression (Regex)
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Regular expression is a special text string for describing a search pattern to match against some text. You may compare using regex to filter what files to include in your datasets against using wildcard (*) to specify a file search pattern in your computer.
-
-A detailed description of how to define regular expression for filtering datasets in NcML is available at http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/ncml/AnnotatedSchema4.html#regexp.
-
-The following table provides some typical examples of regex:
-
-+---------------+------------------------+---------------------------+
-| Regex | Matching File Path | Description |
-+===============+========================+===========================+
-| .*V\\.nc$ | c:/dir/dir/dir/abcV.nc | The file path ends in |
-+---------------+------------------------+---------------------------+
-| | d:/muV.nc | V.nc |
-+---------------+------------------------+---------------------------+
-| .*\\.nc$ | c:/dir/dir/dir/\*.nc | The file suffix is nc |
-+---------------+------------------------+---------------------------+
-| | d:/\*.nc | |
-+---------------+------------------------+---------------------------+
-|.*/2015.*\\.nc$| c:/dir/2015_01_16.nc | The file path contains |
-+---------------+------------------------+---------------------------+
-| | d:/2015*.nc | 2015 and the file suffix |
-+---------------+------------------------+---------------------------+
-| | e:/a/b/c/20151106T.nc | is nc |
-+---------------+------------------------+---------------------------+
diff --git a/docs/source/troubleshooting.rst b/docs/source/troubleshooting.rst
deleted file mode 100644
index e1b0900f..00000000
--- a/docs/source/troubleshooting.rst
+++ /dev/null
@@ -1,14 +0,0 @@
-Troubleshooting
-===============
-
-1. pyBDY crashing in MacOSX (Yosemite)?
-
-* Downgrade the scipy package to 0.15
-
-2. How to make pyBDY to work behind firewall/proxy?
-
-* Set the environment variable http_proxy. eg. in Linux export http_proxy=:
-
-3. Getting this error 'Warning: Please make sure pyjnius is installed and jvm.dll/libjvm.so/libjvm.dylib is in the path' ?
-
-* This error is displayed when the application cannot find the java installation on the local machine. please install a java 7.x runtime from http://www.oracle.com/technetwork/java/javase/downloads/jre7-downloads-1880261.html and append the path to the library in the system path. eg. on windows SET PATH="C:\\Program Files (x86)\\Java\\jre1.7\\bin\\client" on Linux in shell export LD_LIBRARY_PATH=/opt/java/jdk1.7.0_45/jre/lib/amd64/server:$LD_LIBRARY_PATH in osx export DYLD_LIBRARY_PATH=/System/Library/Java/JavaVirtualMachines/jdk1.7.0_09.jdk/Contents/Home/jre/lib/server:$DYLD_LIBRARY_PATH
diff --git a/docs/source/usage.rst b/docs/source/usage.rst
deleted file mode 100644
index 5def434f..00000000
--- a/docs/source/usage.rst
+++ /dev/null
@@ -1,64 +0,0 @@
-Usage
-=====
-There are two tools available in pyBDY. They are described in detail below.
-
-pybdy
-------
-
-This command line tool reads a BDY file, extracts boundary data and prepares
-the data for a NEMO simulation. The bdy file is a plain text file containing
-key value pairs. Please look at the sample `namelist.bdy
-`_
-file, which shares common syntax with the NEMO simulation namelist input file.
-
-.. note:: Directory paths in bdy file can be relative or absolute.
- The application picks the relative path from the current working
- directory.
-
-Syntax for pybdy command is
-
-::
-
- > pybdy [-g] -s
-
-For help
-
-::
-
- > pybdy -h
- > usage: pybdy [-g] -s
- > -g (optional) will open settings editor before extracting the data
- > -s file to use
-
-Example comamnd
-
-::
-
- > pybdy -g -s namelist.bdy
-
-
-pybdy_settings_editor
-----------------------
-
-This tool will open a window where you can edit the mask and change the values of bdy parameters.
-
-Syntax for pybdy_settings_editor command is
-
-::
-
- > pybdy_settings_editor [-s ]
-
-.. note:: If no file name is specified then a file dialog box will open to select a file.
-
-For help
-
-::
-
- > pybdy_settings_editor -h
- > usage: pybdy_settings_editor -s
-
-Example:
-
-::
-
- pybdy_settings_editor -s namelist.bdy
diff --git a/inputs/namelist_local.bdy b/inputs/namelist_local.bdy
index f3fa7562..0d1db96a 100644
--- a/inputs/namelist_local.bdy
+++ b/inputs/namelist_local.bdy
@@ -9,21 +9,6 @@
!!
!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
-!------------------------------------------------------------------------------
-! vertical coordinate
-!------------------------------------------------------------------------------
- rn_hmin = -10 ! min depth of the ocean (>0) or
- ! min number of ocean level (<0)
-
-!------------------------------------------------------------------------------
-! s-coordinate or hybrid z-s-coordinate
-!------------------------------------------------------------------------------
- rn_sbot_min = 10. ! minimum depth of s-bottom surface (>0) (m)
- rn_sbot_max = 7000. ! maximum depth of s-bottom surface
- ! (= ocean depth) (>0) (m)
- ln_s_sigma = .false. ! hybrid s-sigma coordinates
- rn_hc = 150.0 ! critical depth with s-sigma
-
!------------------------------------------------------------------------------
! grid information
!------------------------------------------------------------------------------
@@ -44,7 +29,7 @@
sn_fn = 'NNA_R12' ! prefix for output files
nn_fv = -1e20 ! set fill value for output files
nn_src_time_adj = 0 ! src time adjustment
- sn_dst_metainfo = 'Benchmarking Data'
+ sn_dst_metainfo = 'Benchmarking Data' ! history info
!------------------------------------------------------------------------------
! unstructured open boundaries
@@ -78,15 +63,15 @@
clname(7) = 'M4'
ln_trans = .true. ! interpolate transport rather than
! velocities
- ! location of TPXO7.2 data
- sn_tide_grid_7p2 = './inputs/tpxo7.2/grid_tpxo7.2.nc'
- sn_tide_h = './inputs/tpxo7.2/h_tpxo7.2.nc'
- sn_tide_u = './inputs/tpxo7.2/u_tpxo7.2.nc'
- ! location of TPXO9v5 data: single constituents per file
- sn_tide_grid_9p5 = './inputs/TPXO9_atlas_v5_nc/grid_tpxo9_atlas_30_v5.nc'
- sn_tide_dir = './inputs/TPXO9_atlas_v5_nc/'
- ! location of FES2014 data
- sn_tide_fes = './inputs/FES2014/'
+ ! location of TPXO7.2 data
+ sn_tide_grid_7p2 = './inputs/tpxo7.2/grid_tpxo7.2.nc'
+ sn_tide_h = './inputs/tpxo7.2/h_tpxo7.2.nc'
+ sn_tide_u = './inputs/tpxo7.2/u_tpxo7.2.nc'
+ ! location of TPXO9v5 data: single constituents per file
+ sn_tide_grid_9p5 = './inputs/TPXO9_atlas_v5_nc/grid_tpxo9_atlas_30_v5.nc'
+ sn_tide_dir = './inputs/TPXO9_atlas_v5_nc/'
+ ! location of FES2014 data
+ sn_tide_fes = './inputs/FES2014/'
!------------------------------------------------------------------------------
! Time information for output
@@ -105,11 +90,9 @@
rn_r0 = 0.041666666 ! decorrelation distance use in gauss
! smoothing onto dst points. Need to
! make this a funct. of dlon
- sn_history = 'Benchmarking test case'
- ! history for netcdf file
ln_nemo3p4 = .true. ! else presume v3.2 or v3.3
nn_alpha = 0 ! Euler rotation angle
nn_beta = 0 ! Euler rotation angle
nn_gamma = 0 ! Euler rotation angle
- rn_mask_max_depth = 100.0 ! Maximum depth to be ignored for the mask
- rn_mask_shelfbreak_dist = 20000.0 ! Distance from the shelf break
+ rn_mask_max_depth = 100.0 ! Maximum depth to be ignored for the mask
+ rn_mask_shelfbreak_dist = 20000.0 ! Distance from the shelf break
diff --git a/inputs/namelist_remote.bdy b/inputs/namelist_remote.bdy
index 49fb113a..b9c60947 100644
--- a/inputs/namelist_remote.bdy
+++ b/inputs/namelist_remote.bdy
@@ -9,24 +9,6 @@
!!
!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
-!------------------------------------------------------------------------------
-! vertical coordinate
-!------------------------------------------------------------------------------
- ln_zco = .false. ! z-coordinate - full steps (T/F)
- ln_zps = .true. ! z-coordinate - partial steps (T/F)
- ln_sco = .false. ! s- or hybrid z-s-coordinate (T/F)
- rn_hmin = -10 ! min depth of the ocean (>0) or
- ! min number of ocean level (<0)
-
-!------------------------------------------------------------------------------
-! s-coordinate or hybrid z-s-coordinate
-!------------------------------------------------------------------------------
- rn_sbot_min = 10. ! minimum depth of s-bottom surface (>0) (m)
- rn_sbot_max = 7000. ! maximum depth of s-bottom surface
- ! (= ocean depth) (>0) (m)
- ln_s_sigma = .false. ! hybrid s-sigma coordinates
- rn_hc = 150.0 ! critical depth with s-sigma
-
!------------------------------------------------------------------------------
! grid information
!------------------------------------------------------------------------------
@@ -46,7 +28,7 @@
sn_fn = 'NNA_R12' ! prefix for output files
nn_fv = -1e20 ! set fill value for output files
nn_src_time_adj = 0 ! src time adjustment
- sn_dst_metainfo = 'Benchmarking Data'
+ sn_dst_metainfo = 'Benchmarking Data' ! history info
!------------------------------------------------------------------------------
! unstructured open boundaries
@@ -83,14 +65,14 @@
sn_date_end = '1979-12-01' ! dst output date end YYYY-MM-DD
sn_dst_calendar = 'gregorian' ! output calendar format
sn_date_origin = '1960-01-01' ! reference for time counter YYYY-MM-DD ! location of TPXO7.2 data
- sn_tide_grid = './inputs/tpxo7.2/grid_tpxo7.2.nc'
- sn_tide_h = './inputs/tpxo7.2/h_tpxo7.2.nc'
- sn_tide_u = './inputs/tpxo7.2/u_tpxo7.2.nc'
- ! location of TPXO9v5 data: single constituents per file
- sn_tide_grid = './inputs/TPXO9_atlas_v5_nc/grid_tpxo9_atlas_30_v5.nc'
- sn_tide_dir = './inputs/TPXO9_atlas_v5_nc/'
- ! location of FES2014 data
- sn_tide_fes = './inputs/FES2014/'
+ sn_tide_grid = './inputs/tpxo7.2/grid_tpxo7.2.nc'
+ sn_tide_h = './inputs/tpxo7.2/h_tpxo7.2.nc'
+ sn_tide_u = './inputs/tpxo7.2/u_tpxo7.2.nc'
+ ! location of TPXO9v5 data: single constituents per file
+ sn_tide_grid = './inputs/TPXO9_atlas_v5_nc/grid_tpxo9_atlas_30_v5.nc'
+ sn_tide_dir = './inputs/TPXO9_atlas_v5_nc/'
+ ! location of FES2014 data
+ sn_tide_fes = './inputs/FES2014/'
!------------------------------------------------------------------------------
! Additional parameters
@@ -99,11 +81,9 @@
rn_r0 = 0.041666666 ! decorrelation distance use in gauss
! smoothing onto dst points. Need to
! make this a funct. of dlon
- sn_history = 'Benchmarking test case'
- ! history for netcdf file
ln_nemo3p4 = .true. ! else presume v3.2 or v3.3
nn_alpha = 0 ! Euler rotation angle
nn_beta = 0 ! Euler rotation angle
nn_gamma = 0 ! Euler rotation angle
- rn_mask_max_depth = 100.0 ! Maximum depth to be ignored for the mask
- rn_mask_shelfbreak_dist = 20000.0 ! Distance from the shelf break
+ rn_mask_max_depth = 100.0 ! Maximum depth to be ignored for the mask
+ rn_mask_shelfbreak_dist = 20000.0 ! Distance from the shelf break
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 00000000..17663585
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,84 @@
+site_name: pyBDY Documentation
+site_url: https://noc-msm.github.io/pyBDY
+
+repo_url: https://github.com/NOC-MSM/pyBDY
+repo_name: pyBDY
+edit_uri: edit/main/docs/
+
+theme:
+ name: material
+ language: en
+ logo: assets/icons/noc_logo.png
+ favicon: assets/icons/favicon.ico
+ icon:
+ edit: material/pencil
+ view: material/eye
+ repo: fontawesome/brands/git-alt
+ font:
+ text: Roboto
+ code: Roboto Mono
+ features:
+ - content.action.edit
+ - content.tabs.link
+ - content.code.copy
+ - content.code.annotate
+ - content.code.select
+ - toc.integrate
+ - navigation.tabs
+ - navigation.expand
+ - navigation.footer
+ - search.suggest
+ palette:
+ # Light Mode
+ - scheme: default
+ toggle:
+ icon: material/weather-night
name: Switch to dark mode
+ primary: light blue
+ accent: deep orange
+ # Dark Mode
+ - scheme: slate
+ toggle:
+ icon: material/weather-sunny
name: Switch to light mode
+ primary: light blue
+ accent: deep purple
+
+markdown_extensions:
+- attr_list
+- pymdownx.emoji:
+ emoji_index: !!python/name:material.extensions.emoji.twemoji
+ emoji_generator: !!python/name:material.extensions.emoji.to_svg
+- pymdownx.highlight:
+ anchor_linenums: true
+ line_spans: __span
+ pygments_lang_class: true
+- pymdownx.inlinehilite
+- pymdownx.snippets
+- admonition
+- pymdownx.details
+- pymdownx.superfences:
+ custom_fences:
+ - name: mermaid
+ class: mermaid
+ format: !!python/name:pymdownx.superfences.fence_code_format
+- pymdownx.tabbed:
+ alternate_style: true
+- pymdownx.arithmatex:
+ generic: true
+- md_in_html
+- pymdownx.blocks.caption
+
+plugins:
+- search
+- glightbox
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/NOC-MSM/pyBDY
+ name: GitHub
+
+extra_javascript:
+- javascripts/mathjax.js
+- https://unpkg.com/mathjax@3/es5/tex-mml-chtml.js
diff --git a/src/grid/__init__.py b/src/grid/__init__.py
new file mode 100644
index 00000000..919a1a0f
--- /dev/null
+++ b/src/grid/__init__.py
@@ -0,0 +1,13 @@
+"""a Python based regional NEMO model configuration toolbox."""
+
+# Copyright 2023, NOC.
+
+try:
+ # NOTE: the `version.py` file must not be present in the git repository
+ # as it is generated by setuptools at install time
+ from .version import __version__
+except ImportError: # pragma: no cover
+ # Local copy or not installed with setuptools
+ __version__ = "999"
+
+__all__ = ["__version__"]
diff --git a/src/grid/hgr.py b/src/grid/hgr.py
index 7d38ae7c..26928c90 100644
--- a/src/grid/hgr.py
+++ b/src/grid/hgr.py
@@ -39,14 +39,14 @@ def __init__(self, hgr_file, name_map_file, logger, dst=1):
"""
Master horizontal class.
- Args:
- ----
+ Parameters
+ ----------
hgr_file (str) : string of file for loading hgr data
name_map_file (str) : string of file for mapping variable names
logger (object) : log error and messages
dst (bool) : flag for destination (true) or source (false)
- Returns:
+ Returns
-------
H_grid (object) : horizontal grid object
"""
@@ -101,9 +101,13 @@ def get_vars(self, vars_want):
"""
Get the glam, gphi and e scale factors from file if possible.
- Args:
- ----
+ Parameters
+ ----------
vars_want (list) : variables needed from file.
+
+ Returns
+ -------
+ None : var_list is populated
"""
# find the variables that have been name mapped
if self.dst:
@@ -185,13 +189,13 @@ def fill_hgrid_vars(grid_type, grid, missing):
"""
Calculate the missing horizontal grid variables and add them to grid.
- Args:
- ----
+ Parameters
+ ----------
grid_type (str) : type of horizontal grid (A, B or C)
grid (dict) : dictionary of grid data variable
missing (list) : list of missing variables to calculate
- Returns:
+ Returns
-------
grid (dict) : horizontal grid data dictionary
"""
@@ -225,12 +229,12 @@ def calc_grid_from_t(t_mesh, mesh):
"""
Calculate missing glam, gphi or gdep from t-grid.
- Args:
- ----
+ Parameters
+ ----------
t_mesh (np.array) : mesh variable glam or gphi on t-grid
mesh (str) : grid mesh type (glam, gphi, or gdep of u, v, f)
- Returns:
+ Returns
-------
mesh_out (dict) : horizontal grid mesh data variable
"""
@@ -265,13 +269,13 @@ def calc_e1_e2(glam, gphi, ij):
"""
Calculate missing scale factor e1 and e2 from glam or gphi.
- Args:
- ----
+ Parameters
+ ----------
glam (np.array) : mesh variable glam (lon) [time, j, i]
gphi (np.array) : mesh variable gphi (lat) [time, j, i]
ij (int) : ij direction 1 (i or x direction) or 2 (j or y direction)
- Returns:
+ Returns
-------
e (np.array) : horizontal distance scale factor e
"""
diff --git a/src/grid/zgr.py b/src/grid/zgr.py
index 3f1a51ed..d9cfeceb 100644
--- a/src/grid/zgr.py
+++ b/src/grid/zgr.py
@@ -40,8 +40,8 @@ def __init__(self, zgr_file, name_map_file, hgr_type, e_dict, logger, dst=1):
"""
Master depth class.
- Args:
- ----
+ Parameters
+ ----------
zgr_file (str) : string of file for loading zgr data
name_map_file (str) : string of file for mapping variable names
hgr_type (str) : horizontal grid type
@@ -49,7 +49,7 @@ def __init__(self, zgr_file, name_map_file, hgr_type, e_dict, logger, dst=1):
logger (object) : log error and messages
dst (bool) : flag for destination (true) or source (false)
- Returns:
+ Returns
-------
Depth (object) : Depth object
"""
@@ -110,9 +110,13 @@ def get_vars(self, vars_want):
"""
Get the gdep and e3 scale factors from file if possible.
- Args:
- ----
+ Parameters
+ ----------
vars_want (list) : variables needed from file.
+
+ Returns
+ -------
+ None : var_list is populated
"""
# find the variables that have been name mapped
if self.dst:
@@ -199,15 +203,15 @@ def fill_zgrid_vars(zgr_type, grid, hgr_type, e_dict, missing):
"""
Calculate the missing vertical grid variables and add them to grid.
- Args:
- ----
+ Parameters
+ ----------
zgr_type (str) : type of vertical grid (zco, zps or sco)
grid (dict) : dictionary of grid data variable
hgr_type (str) : horizontal grid type
e_dict (dict) : dictionary of e1 and e2 scale factors
missing (list) : list of missing variables to calculate
- Returns:
+ Returns
-------
grid (dict) : vertical grid data dictionary
"""
@@ -302,11 +306,11 @@ def calc_gdepw(gdept):
"""
Calculate missing gdepw from gdept.
- Args:
- ----
+ Parameters
+ ----------
gdept (np.array) : mesh variable gdept on t-grid
- Returns:
+ Returns
-------
dep_out (np.array) : vertical grid mesh data variable
"""
@@ -329,13 +333,13 @@ def vert_calc_e3(gdep_mid, gdep_top, lev):
"""
Calculate missing vertical scale factors e3 from gdep.
- Args:
- ----
+ Parameters
+ ----------
gdep_mid (np.array) : mesh variable on t levels
gdep_top (np.array) : mesh variable on w levels
lev (str) : grid level type (e3 of t, w, u, v)
- Returns:
+ Returns
-------
e3 (np.array) : vertical distance scale factor e3 of lev
"""
@@ -366,14 +370,14 @@ def horiz_interp_lev(t, w, zgr_type, hgr_type):
For C-Grids, zps or sco vertical coords are used to define u, v, and f.
For B-Grids, u and v values are set to f values following zps or sco.
- Args:
- ----
+ Parameters
+ ----------
t (np.array) : vertical scale factors e or dep on t points
w (np.array) : vertical scale factors e or dep on w points
zgr_type (str) : type of vertical grid (zco, zps or sco)
hgr_type (str) : horizontal grid type (A, B or C)
- Returns:
+ Returns
-------
lev (dict) : vertical distance scale factor e or gdep
"""
@@ -427,13 +431,13 @@ def horiz_interp_e3_old(e_in, var_in, lev):
To interpolate to get e3u or e3v, input var_in as e3t data but for e3f this
should be e3u.
- Args:
- ----
+ Parameters
+ ----------
e_in (dict) : all horizontal scale factors e1 and e2 in dictionary
var_in (np.array) : e scale factor to interpolate from e3t (or e3u for f)
lev (str) : grid level type (e3 of u, v, f)
- Returns:
+ Returns
-------
e3 (np.array) : vertical distance scale factor e3 of lev
"""
diff --git a/src/pybdy/nemo_bdy_chunk.py b/src/pybdy/nemo_bdy_chunk.py
index d47882bf..44ac4c9e 100644
--- a/src/pybdy/nemo_bdy_chunk.py
+++ b/src/pybdy/nemo_bdy_chunk.py
@@ -39,17 +39,14 @@ def chunk_bdy(bdy):
The chunks are then optionally split in the middle if they're above a certain size
after attempting to split at corners.
- Args:
- ----
- bdy (Boundary object) : object with indices of the boundary organised as
- bdy.bdy_i[bdy point, i/j grid]
- and rim width number
- bdy.bdy_r[bdy_point]
+ Parameters
+ ----------
+ bdy (obj) : organised as bdy_i[point, i/j grid] and rim width bdy_r[point]
logger : log error and messages
- Returns:
+ Returns
-------
- numpy.array : array of chunk numbers
+ chunk_number (numpy.array) : array of chunk numbers
"""
rw = bdy.settings["rimwidth"]
@@ -71,16 +68,16 @@ def chunk_land(ibdy, jbdy, chunk_number, rw):
"""
Find natural breaks in the boundary looking for gaps in i and j.
- Args:
- ----
+ Parameters
+ ----------
ibdy (numpy.array) : index in i direction
jbdy (numpy.array) : index in j direction
chunk_number (numpy.array) : array of chunk numbers. -1 means an unassigned chunk number
rw (int) : rimwidth
- Returns:
+ Returns
-------
- numpy.array : array of chunk numbers
+ chunk_number (numpy.array) : array of chunk numbers
"""
if np.min(chunk_number) <= -1:
chk = 0
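
The gap-based splitting that chunk_land performs can be summarised in a few lines. A hedged sketch of the idea, not the pyBDY implementation:

```
import numpy as np


def chunk_land_sketch(ibdy, jbdy, rw):
    # sort boundary points along i (then j) and split wherever two
    # consecutive points are further apart than the rim width
    order = np.lexsort((jbdy, ibdy))
    gaps = (np.abs(np.diff(ibdy[order])) > rw) | (
        np.abs(np.diff(jbdy[order])) > rw
    )
    chunks_sorted = np.concatenate(([0], np.cumsum(gaps)))
    chunk_number = np.empty_like(chunks_sorted)
    chunk_number[order] = chunks_sorted  # map back to input point order
    return chunk_number
```
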
@@ -132,18 +129,17 @@ def chunk_corner(ibdy, jbdy, rbdy, chunk_number, rw):
To do this we look for a change in direction along each rim.
- Args:
- ----
+ Parameters
+ ----------
ibdy (numpy.array) : index in i direction
jbdy (numpy.array) : index in j direction
rbdy (numpy.array) : rim index
chunk_number (numpy.array) : array of chunk numbers. -1 means an unassigned chunk number
rw (int) : rimwidth
- mid_split (list) : list of chunk numbers that need splitting
- Returns:
+ Returns
-------
- numpy.array : array of chunk numbers
+ chunk_number (numpy.array) : array of chunk numbers
"""
all_chunk = np.unique(chunk_number)
all_chunk_st = all_chunk * 1
@@ -340,15 +336,15 @@ def chunk_large(ibdy, jbdy, chunk_number):
"""
Split boundaries that have too much white space and are too large.
- Args:
- ----
+ Parameters
+ ----------
ibdy (numpy.array) : index in i direction
jbdy (numpy.array) : index in j direction
chunk_number (numpy.array) : array of chunk numbers. -1 means an unassigned chunk number
- Returns:
+ Returns
-------
- numpy.array : array of chunk numbers
+ chunk_number (numpy.array) : array of chunk numbers
"""
thresh_ratio = 1 / 3 # 1:3
thresh_large = 2000
diff --git a/src/pybdy/nemo_bdy_extr_assist.py b/src/pybdy/nemo_bdy_extr_assist.py
index a742f747..4cc043b1 100644
--- a/src/pybdy/nemo_bdy_extr_assist.py
+++ b/src/pybdy/nemo_bdy_extr_assist.py
@@ -110,7 +110,8 @@ def check_wrap(imin, imax, sc_lon):
def get_vertical_weights(dst_dep, dst_len_z, num_bdy, sc_z, sc_z_len, ind, zco):
- """Determine 3D depth vertical weights for the linear interpolation onto Dst grid.
+ """
+ Determine 3D depth vertical weights for the linear interpolation onto Dst grid.
Selects 9 source points horizontally around a destination grid point.
Calculates the distance from each source point to the destination to
@@ -123,8 +124,7 @@ def get_vertical_weights(dst_dep, dst_len_z, num_bdy, sc_z, sc_z_len, ind, zco):
num_bdy (int) : number of boundary points in chunk
sc_z (np.array) : the depth of the source grid [k, j, i]
sc_z_len (int) : the length of depth axis of the source grid
- ind (np.array) : indices of bdy points and 9 nearest neighbours
- for flattened j,i array order="F" [nbdy, 9]
+ ind (np.array) : indices of bdy and 9 nearest neighbours flattened "F" j,i [nbdy, 9]
zco (bool) : if True z levels are not spatially varying
Returns
@@ -248,6 +248,9 @@ def get_vertical_weights_zco(dst_dep, dst_len_z, num_bdy, sc_z, sc_z_len):
Calculates the vertical distance from each source point to the destination to
be used in weightings. The resulting arrays are [nbdy * nz, 2].
+ Note: z_dist and z_ind are [nbdy*nz, 2] where [:, 0] is the nearest vertical index
+ and [:, 1] is the index above or below i.e. the vertical index -1 for sc_z > dst_z
+ and vertical index +1 for sc_z <= dst_z
Parameters
----------
@@ -262,12 +265,6 @@ def get_vertical_weights_zco(dst_dep, dst_len_z, num_bdy, sc_z, sc_z_len):
z_dist (np.array) : the distance weights of the selected points
z_ind (np.array) : the indices of the sc depth above and below bdy
"""
- # Note: z_dist and z_ind are [nbdy*nz, 2]
- # where [:, 0] is the nearest vertical index
- # and [:, 1] is the index above or below
- # i.e. the vertical index -1 for sc_z > dst_z
- # and vertical index +1 for sc_z <= dst_z
-
# Allocate vertical index array
sc_z = sc_z[:, 0, 0]
dst_dep_rv = dst_dep.ravel(order="F").filled(np.nan)
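
The note above pins down the [nbdy*nz, 2] layout of z_ind and z_dist. A minimal sketch of that zco logic, assuming numpy and inverse-distance weights (an illustration of the layout, not the exact pyBDY code):

```
import numpy as np


def vertical_weights_zco_sketch(sc_z, dst_z):
    # sc_z: 1D source depths (zco levels do not vary in space)
    # dst_z: flattened destination depths [nbdy * nz]
    nearest = np.argmin(np.abs(sc_z[None, :] - dst_z[:, None]), axis=1)
    # neighbour sits one level shallower when the nearest source level
    # is deeper than the destination depth, one level deeper otherwise
    neighbour = nearest + np.where(sc_z[nearest] > dst_z, -1, 1)
    neighbour = np.clip(neighbour, 0, len(sc_z) - 1)
    z_ind = np.column_stack((nearest, neighbour))
    d0 = np.abs(sc_z[nearest] - dst_z)
    d1 = np.abs(sc_z[neighbour] - dst_z)
    tot = np.where(d0 + d1 > 0, d0 + d1, 1.0)
    w0 = np.where(d0 + d1 > 0, d1 / tot, 1.0)  # inverse-distance weights
    z_dist = np.column_stack((w0, 1.0 - w0))
    return z_ind, z_dist
```
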
@@ -408,15 +405,15 @@ def distance_weights(sc_bdy, dist_tot, sc_z_len, r0, logger):
"""
Find the distance weightings for averaging source data to destination.
- Args:
- ----
+ Parameters
+ ----------
sc_bdy (numpy.array) : source data
dist_tot (numpy.array) : distance from dst point to 9 nearest sc points
sc_z_len (int) : the number of depth levels
r0 (float) : correlation distance
logger : log of statements
- Returns:
+ Returns
-------
dist_wei (numpy.array) : weightings for averaging
dist_fac (numpy.array) : total weighting factor
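
The decorrelation distance r0 controls how quickly a source point's influence decays with distance. A minimal sketch, assuming a Gaussian kernel (the exact kernel used in pyBDY may differ):

```
import numpy as np


def distance_weights_sketch(dist_tot, r0):
    # dist_tot: distances to the 9 nearest source points [9, nbdy]
    dist_wei = np.exp(-0.5 * (dist_tot / r0) ** 2)  # Gaussian fall-off
    dist_fac = np.sum(dist_wei, axis=0)             # normaliser per bdy point
    return dist_wei, dist_fac
```
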
@@ -467,12 +464,12 @@ def valid_index(sc_bdy, logger):
"""
Find an array of valid indices.
- Args:
- ----
+ Parameters
+ ----------
sc_bdy (numpy.array) : source data
logger : log of statements
- Returns:
+ Returns
-------
data_ind (numpy.array) : indices of max depth of valid data
nan_ind (numpy.array) : indices where source is land
@@ -503,14 +500,14 @@ def interp_horizontal(sc_bdy, dist_wei, dist_fac, logger):
"""
Interpolate the source data to the destination grid using weighted average.
- Args:
- ----
+ Parameters
+ ----------
sc_bdy (numpy.array) : source data
dist_wei (numpy.array) : weightings for interpolation
dist_fac (numpy.array) : sum of weightings
logger : log of statements
- Returns:
+ Returns
-------
dst_bdy (numpy.array) : destination bdy points with data from source grid
"""
diff --git a/src/pybdy/nemo_bdy_extr_tm3.py b/src/pybdy/nemo_bdy_extr_tm3.py
index edbdf2db..442997d2 100644
--- a/src/pybdy/nemo_bdy_extr_tm3.py
+++ b/src/pybdy/nemo_bdy_extr_tm3.py
@@ -25,7 +25,6 @@
@author James Harle
@author John Kazimierz Farey
@author: Mr. Srikanth Nagella
-$Last commit on:$
"""
# External Imports
import copy
@@ -64,15 +63,14 @@ def __init__(self, setup, SourceCoord, DstCoord, Grid, var_nam, grd, pair):
setup (list) : settings for bdy
SourceCoord (obj) : source grid information
DstCoord (obj) : destination grid information
- Grid (dict) : containing grid type 't', 'u', 'v'
- and source time
+ Grid (dict) : containing grid type 't', 'u', 'v' and source time
var_name (list) : netcdf file variable names (str)
years (list) : years to extract (default [1979])
months (list) : months to extract (default [11])
Returns
-------
- None
+ Extract (obj) : Object with indexing arrays and weightings ready for interpolation
"""
self.logger = logging.getLogger(__name__)
self.g_type = grd
@@ -506,8 +504,12 @@ def extract_month(self, year, month):
Parameters
----------
- year -- year of data to be extracted
- month -- month of the year to be extracted
+ year : year of data to be extracted
+ month : month of the year to be extracted
+
+ Returns
+ -------
+ self.data_out : data from source on bdy locations and depths
"""
self.logger.info("extract_month function called")
@@ -1034,12 +1036,16 @@ def extract_month(self, year, month):
# equivalent to Matlab alpha(beta(:))
def _flat_ref(self, alpha, beta):
"""
- Extract input index elements from array and order them in Fotran array and return the new array.
+ Extract the indexed elements from an array in Fortran (column-major) order and return the new array.
Parameters
----------
- alpha -- input array
- beta -- index array
+ alpha : input array
+ beta : index array
+
+ Returns
+ -------
+ alpha : index elements in flat Fortran array
"""
return alpha.flatten("F")[beta.flatten("F")].reshape(beta.shape, order="F")
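
A short worked example of this Fortran-order indexing, with hypothetical arrays:

```
import numpy as np

alpha = np.arange(6).reshape(2, 3)  # [[0, 1, 2], [3, 4, 5]]
beta = np.array([[0, 2], [1, 3]])   # flat Fortran-order indices into alpha

# alpha.flatten("F") -> [0, 3, 1, 4, 2, 5]
# beta.flatten("F")  -> [0, 1, 2, 3], so the picked values are [0, 3, 1, 4]
# reshaping to beta.shape in Fortran order gives [[0, 1], [3, 4]]
result = alpha.flatten("F")[beta.flatten("F")].reshape(beta.shape, order="F")
print(result)
```
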
@@ -1054,10 +1060,15 @@ def cal_trans(self, source, dest, year, month):
Parameters
----------
- source -- source calendar
- dest -- destination calendar
- year -- input year
- month -- input month
+ source : source calendar
+ dest : destination calendar
+ year : input year
+ month : input month
+
+ Returns
+ -------
+ sf : scale factor
+ ed : number of days in month
"""
vals = {"gregorian": 365.0 + isleap(year), "noleap": 365.0, "360_day": 360.0}
if source not in list(vals.keys()):
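
A hedged sketch of how a scale factor and month end-day could be derived from the vals table above; the real cal_trans may differ in detail:

```
from calendar import isleap, monthrange


def cal_trans_sketch(source, dest, year, month):
    vals = {"gregorian": 365.0 + isleap(year), "noleap": 365.0, "360_day": 360.0}
    # stretch factor between the two calendars' year lengths (assumed direction)
    sf = vals[source] / vals[dest]
    # days in the destination month: fixed 30 for 360_day, else the real length
    ed = 30 if dest == "360_day" else monthrange(year, month)[1]
    return sf, ed
```
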
@@ -1087,15 +1098,12 @@ def time_delta(self, time_counter):
Parameters
----------
- time_counter
- model time coordinate
+ time_counter : model time coordinate
Returns
-------
- deltaT
- length of time step
- dstep
- number of time steps per day
+ deltaT : length of time step
+ dstep : number of time steps per day
"""
# get time derivative
deltaT = np.diff(time_counter)
@@ -1218,10 +1226,10 @@ def write_out(self, year, month, ind, unit_origin):
Parameters
----------
- year (int) : year to write out
- month (int) : month to write out
- ind (dict): dictionary holding grid information
- unit_origin (str) : time reference '%d 00:00:00' %date_origin
+ year (int) : year to write out
+ month (int) : month to write out
+ ind (dict): dictionary holding grid information
+ unit_origin (str) : time reference '%d 00:00:00' %date_origin
Returns
-------
diff --git a/src/pybdy/nemo_bdy_gen_c.py b/src/pybdy/nemo_bdy_gen_c.py
index d78b4808..fce9b3b5 100644
--- a/src/pybdy/nemo_bdy_gen_c.py
+++ b/src/pybdy/nemo_bdy_gen_c.py
@@ -16,25 +16,28 @@
class Boundary:
- # Bearings for overlays
- _NORTH = [1, -1, 1, -1, 2, None, 1, -1]
- _SOUTH = [1, -1, 1, -1, None, -2, 1, -1]
- _EAST = [1, -1, 1, -1, 1, -1, 2, None]
- _WEST = [1, -1, 1, -1, 1, -1, None, -2]
+ """Class for boundary definitions."""
def __init__(self, boundary_mask, settings, grid):
"""
Generate the indices for NEMO Boundary and returns a Grid object with indices.
- Paramemters
- -----------
- boundary_mask -- boundary mask
- settings -- dictionary of setting values
- grid -- type of the grid 't', 'u', 'v'
- Attributes:
- bdy_i -- index
- bdy_r -- r index
+ Parameters
+ ----------
+ boundary_mask : boundary mask
+ settings : dictionary of setting values
+ grid : type of the grid 't', 'u', 'v'
+
+ Returns
+ -------
+ Boundary (object) : where bdy_i is index and bdy_r is the r index
"""
+ # Bearings for overlays
+ self._NORTH = [1, -1, 1, -1, 2, None, 1, -1]
+ self._SOUTH = [1, -1, 1, -1, None, -2, 1, -1]
+ self._EAST = [1, -1, 1, -1, 1, -1, 2, None]
+ self._WEST = [1, -1, 1, -1, 1, -1, None, -2]
+
self.logger = logging.getLogger(__name__)
bdy_msk = boundary_mask
self.settings = settings
@@ -90,10 +93,10 @@ def __init__(self, boundary_mask, settings, grid):
np.arange(bdy_msk.shape[1]), np.arange(bdy_msk.shape[0])
)
- SBi, SBj = self._find_bdy(igrid, jgrid, msk, self._SOUTH)
- NBi, NBj = self._find_bdy(igrid, jgrid, msk, self._NORTH)
- EBi, EBj = self._find_bdy(igrid, jgrid, msk, self._EAST)
- WBi, WBj = self._find_bdy(igrid, jgrid, msk, self._WEST)
+ SBi, SBj = self.find_bdy(igrid, jgrid, msk, self._SOUTH)
+ NBi, NBj = self.find_bdy(igrid, jgrid, msk, self._NORTH)
+ EBi, EBj = self.find_bdy(igrid, jgrid, msk, self._EAST)
+ WBi, WBj = self.find_bdy(igrid, jgrid, msk, self._WEST)
# create a 2D array index for the points that are on border
tij = np.column_stack(
@@ -123,8 +126,8 @@ def __init__(self, boundary_mask, settings, grid):
## Remove duplicate and open sea points ##
- bdy_i, bdy_r = self._remove_duplicate_points(bdy_i, bdy_r)
- bdy_i, bdy_r, nonmask_index = self._remove_landpoints_open_ocean(
+ bdy_i, bdy_r = self.remove_duplicate_points(bdy_i, bdy_r)
+ bdy_i, bdy_r, nonmask_index = self.remove_landpoints_open_ocean(
bdy_msk, bdy_i, bdy_r
)
@@ -144,7 +147,7 @@ def __init__(self, boundary_mask, settings, grid):
for i in range(rw - 1):
# Check each bearing
for b in [self._SOUTH, self._NORTH, self._WEST, self._EAST]:
- r_msk, r_msk_ref = self._fill(r_msk, r_msk_ref, b)
+ r_msk, r_msk_ref = self.fill(r_msk, r_msk_ref, b)
self.logger.debug("done loop")
# update bdy_i and bdy_r
@@ -154,7 +157,7 @@ def __init__(self, boundary_mask, settings, grid):
bdy_r_tmp = r_msk.T[new_ind.T]
bdy_i = np.vstack((bdy_i_tmp.T, bdy_i))
- uniqind = self._unique_rows(bdy_i)
+ uniqind = self.unique_rows(bdy_i)
bdy_i = bdy_i[uniqind, :]
bdy_r = np.hstack((bdy_r_tmp, bdy_r))
bdy_r = bdy_r[uniqind]
@@ -169,30 +172,35 @@ def __init__(self, boundary_mask, settings, grid):
self.logger.debug("Final bdy_i: %s", self.bdy_i.shape)
- def _remove_duplicate_points(self, bdy_i, bdy_r):
+ def remove_duplicate_points(self, bdy_i, bdy_r):
"""
Remove the duplicate points in the bdy_i and return the bdy_i and bdy_r.
Parameters
----------
- bdy_i -- bdy indexes
- bdy_r -- bdy rim values.
+ bdy_i : bdy indexes
+ bdy_r : bdy rim values.
+
+ Returns
+ -------
+ bdy_i : bdy indexes
+ bdy_r : bdy rim values.
"""
bdy_i2 = np.transpose(bdy_i, (1, 0))
- uniqind = self._unique_rows(bdy_i2)
+ uniqind = self.unique_rows(bdy_i2)
bdy_i = bdy_i2[uniqind]
bdy_r = bdy_r[uniqind]
return bdy_i, bdy_r
- def _remove_landpoints_open_ocean(self, mask, bdy_i, bdy_r):
+ def remove_landpoints_open_ocean(self, mask, bdy_i, bdy_r):
"""Remove the land points and open ocean points."""
unmask_index = mask[bdy_i[:, 1], bdy_i[:, 0]] == 1
bdy_i = bdy_i[unmask_index, :]
bdy_r = bdy_r[unmask_index]
return bdy_i, bdy_r, unmask_index
- def _find_bdy(self, igrid, jgrid, mask, brg):
+ def find_bdy(self, igrid, jgrid, mask, brg):
"""
Find the border indexes by checking the change from ocean to land.
@@ -202,10 +210,15 @@ def _find_bdy(self, igrid, jgrid, mask, brg):
Parameters
----------
- igrid -- I x direction indexes
- jgrid -- J y direction indexes
- mask -- mask data
- brg -- mask index range
+ igrid : I x direction indexes
+ jgrid : J y direction indexes
+ mask : mask data
+ brg : mask index range
+
+ Returns
+ -------
+ bdy_i : bdy indexes
+ bdy_r : bdy rim values.
"""
# subtract matrices to find boundaries, set to True
m1 = mask[brg[0] : brg[1], brg[2] : brg[3]]
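
The shifted-slice subtraction that find_bdy performs can be shown on a single bearing. A minimal sketch of ocean-to-land edge detection via offset masks (the bearing lists above encode the equivalent slice offsets for all four directions):

```
import numpy as np


def find_bdy_sketch(mask):
    # mask: 2D array, 1 = ocean, other values land or outside the domain
    core = mask[1:-1, 1:-1]
    north = mask[2:, 1:-1]
    # an ocean point whose northern neighbour is not ocean is a
    # north-facing boundary point
    bdy = (core == 1) & (north != 1)
    bdy_j, bdy_i = np.where(bdy)
    return bdy_i + 1, bdy_j + 1  # shift back to full-grid indices
```
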
@@ -219,7 +232,7 @@ def _find_bdy(self, igrid, jgrid, mask, brg):
return bdy_I, bdy_J
- def _fill(self, mask, ref, brg):
+ def fill(self, mask, ref, brg):
tmp = mask[brg[4] : brg[5], brg[6] : brg[7]]
ind = (ref - tmp) > 1
ref[ind] = tmp[ind] + 1
@@ -227,13 +240,17 @@ def _fill(self, mask, ref, brg):
return mask, ref
- def _unique_rows(self, t):
+ def unique_rows(self, t):
"""
- Return indexes of unique rows in the input 2D array.
+ Find indexes of unique rows in the input 2D array.
Parameters
----------
- t -- input 2D array.
+ t : input 2D array.
+
+ Returns
+ -------
+ indx : indexes of unique rows
"""
sh = np.shape(t)
if (len(sh) > 2) or (sh[0] == 0) or (sh[1] == 0):
diff --git a/src/pybdy/nemo_bdy_grid_angle.py b/src/pybdy/nemo_bdy_grid_angle.py
index d2dcbff7..d32f4af0 100644
--- a/src/pybdy/nemo_bdy_grid_angle.py
+++ b/src/pybdy/nemo_bdy_grid_angle.py
@@ -18,17 +18,33 @@
class GridAngle:
- # I and J offsets for different grid types
- CASES = {
- "t": [0, 0, 0, -1],
- "u": [0, 0, 0, -1],
- "v": [0, 0, -1, 0],
- "f": [0, 1, 0, 0],
- }
- MAP = {"t": "v", "u": "f", "v": "f", "f": "u"}
+ """Class to get orientation of grid from I and J offsets for different grid types."""
def __init__(self, hgr, imin, imax, jmin, jmax, cd_type):
+ """
+ Get sine and cosine values for the grid angle from North.
+
+ Parameters
+ ----------
+ hgr : grid object
+ imin : minimum model zonal indices
+ imax : maximum model zonal indices
+ jmin : minimum model meridional indices
+ jmax : maximum model meridional indices
+ cd_type : define the nature of pt2d grid points
+
+ Returns
+ -------
+ None : object
+ """
# set case and check validity
+ self.CASES = {
+ "t": [0, 0, 0, -1],
+ "u": [0, 0, 0, -1],
+ "v": [0, 0, -1, 0],
+ "f": [0, 1, 0, 0],
+ }
+ self.MAP = {"t": "v", "u": "f", "v": "f", "f": "u"}
self.CD_T = cd_type.lower()
self.logger = logging.getLogger(__name__)
if self.CD_T not in ["t", "u", "v", "f"]:
@@ -51,23 +67,20 @@ def __init__(self, hgr, imin, imax, jmin, jmax, cd_type):
self.DIM_STR = None
# Get North pole direction and modulus for cd_type
- np_x, np_y, np_n = self._get_north_dir()
+ np_x, np_y, np_n = self.get_north_dir()
# Get i or j MAP segment Direction around cd_type
- sd_x, sd_y, sd_n = self._get_seg_dir(np_n)
+ sd_x, sd_y, sd_n = self.get_seg_dir(np_n)
# Get cosinus and sinus
- self.sinval, self.cosval = self._get_sin_cos(np_x, np_y, np_n, sd_x, sd_y, sd_n)
+ self.sinval, self.cosval = self.get_sin_cos(np_x, np_y, np_n, sd_x, sd_y, sd_n)
- # # # # # # # # # # # # #
- # # Functions # # # # # #
- # # # # # # # # # # # # #
-
- def _get_sin_cos(self, nx, ny, nn, sx, sy, sn):
+ def get_sin_cos(self, nx, ny, nn, sx, sy, sn):
+ """Get sin and cos from lat and lon using using scaler/vectorial products."""
# Geographic mesh
i, j, ii, jj = self.CASES[self.CD_T]
- var_one = self._get_lam_phi(map=True, i=i, j=j, single=True)
- var_two = self._get_lam_phi(map=True, i=ii, j=jj, single=True)
+ var_one = self.get_lam_phi(map=True, i=i, j=j, single=True)
+ var_two = self.get_lam_phi(map=True, i=ii, j=jj, single=True)
ind = (np.abs(var_one - var_two) % 360) < 1.0e-8
@@ -84,25 +97,25 @@ def _get_sin_cos(self, nx, ny, nn, sx, sy, sn):
return sin_val, cos_val
- # Finds North pole direction and modulus of some point
- def _get_north_dir(self):
- zlam, zphi = self._get_lam_phi()
- z_x_np = self._trig_eq(-2, "cos", zlam, zphi)
- z_y_np = self._trig_eq(-2, "sin", zlam, zphi)
+ def get_north_dir(self):
+ """Find North pole direction and modulus of some point."""
+ zlam, zphi = self.get_lam_phi()
+ z_x_np = self.trig_eq(-2, "cos", zlam, zphi)
+ z_y_np = self.trig_eq(-2, "sin", zlam, zphi)
z_n_np = np.power(z_x_np, 2) + np.power(z_y_np, 2)
return z_x_np, z_y_np, z_n_np
- # Find segmentation direction of some point
- def _get_seg_dir(self, north_n):
+ def get_seg_dir(self, north_n):
+ """Find segmentation direction of some point."""
i, j, ii, jj = self.CASES[self.CD_T]
- zlam, zphi = self._get_lam_phi(map=True, i=i, j=j)
- z_lan, z_phh = self._get_lam_phi(map=True, i=ii, j=jj)
+ zlam, zphi = self.get_lam_phi(map=True, i=i, j=j)
+ z_lan, z_phh = self.get_lam_phi(map=True, i=ii, j=jj)
- z_x_sd = self._trig_eq(2, "cos", zlam, zphi) - self._trig_eq(
+ z_x_sd = self.trig_eq(2, "cos", zlam, zphi) - self.trig_eq(
2, "cos", z_lan, z_phh
)
- z_y_sd = self._trig_eq(2, "sin", zlam, zphi) - self._trig_eq(
+ z_y_sd = self.trig_eq(2, "sin", zlam, zphi) - self.trig_eq(
2, "sin", z_lan, z_phh
) # N
@@ -111,9 +124,12 @@ def _get_seg_dir(self, north_n):
return z_x_sd, z_y_sd, z_n_sd
- # Returns lam/phi in (offset) i/j range for init grid type
- # Data must be converted to float64 to prevent dementation of later results
- def _get_lam_phi(self, map=False, i=0, j=0, single=False):
+ def get_lam_phi(self, map=False, i=0, j=0, single=False):
+ """
+ Get lam/phi in (offset) i/j range for init grid type.
+
+ Data must be converted to float64 to prevent degradation of later results.
+ """
d = self.DIM_STR
i, ii = self.IMIN + i, self.IMAX + i
j, jj = self.JMIN + j, self.JMAX + j
@@ -135,8 +151,8 @@ def _get_lam_phi(self, map=False, i=0, j=0, single=False):
return zlam, zphi
- # Returns long winded equation of two vars; some lam and phi
- def _trig_eq(self, x, eq, z_one, z_two):
+ def trig_eq(self, x, eq, z_one, z_two):
+ """Calculate long winded equation of two vars; some lam and phi."""
if eq == "cos":
z_one = np.cos(np.radians(z_one))
elif eq == "sin":
diff --git a/src/pybdy/nemo_bdy_setup.py b/src/pybdy/nemo_bdy_setup.py
index 18776800..fd294ab0 100644
--- a/src/pybdy/nemo_bdy_setup.py
+++ b/src/pybdy/nemo_bdy_setup.py
@@ -23,7 +23,7 @@
@author John Kazimierz Farey
@author James Harle
-$Last commit on:$
+
"""
# External imports
import logging
@@ -38,10 +38,6 @@ class Setup(object):
Notes
-----
This class holds the settings information
-
- Attributes
- ----------
- is a dict holding all the vars.
"""
def __init__(self, setfile):
@@ -52,10 +48,15 @@ def __init__(self, setfile):
-----
This constructor reads the settings file and sets the dictionary with
setting name/key and it's value.
+
+ Parameters
+ ----------
+ setfile (str) : settings file
"""
# Logging for class
self.logger = logging.getLogger(__name__)
self.filename = setfile
+ self.settings = {}
if not setfile: # debug
self.filename = "../data/namelist.bdy"
self._load_settings()
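
The settings dictionary is filled by parsing `key = value ! comment` lines from namelist.bdy. A sketch of parsing one such line, assuming this simple format; the real Setup handles more cases:

```
def parse_setting_sketch(line):
    """Return (key, value) for one namelist line, or None for non-settings."""
    line = line.split("!")[0].strip()  # drop trailing comments
    if "=" not in line:
        return None
    key, val = (s.strip() for s in line.split("=", 1))
    if val.lower() in (".true.", ".false."):
        return key, val.lower() == ".true."
    for cast in (int, float):
        try:
            return key, cast(val)
        except ValueError:
            pass
    return key, val.strip("'\"")


# e.g. parse_setting_sketch("rn_hc = 150.0 ! critical depth")
# -> ("rn_hc", 150.0)
```
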
diff --git a/src/pybdy/nemo_bdy_src_time.py b/src/pybdy/nemo_bdy_src_time.py
deleted file mode 100644
index f84f5e90..00000000
--- a/src/pybdy/nemo_bdy_src_time.py
+++ /dev/null
@@ -1,60 +0,0 @@
-##################################################
-# Written by John Kazimierz Farey, Sep 2012 #
-# Port of Matlab code of James Harle #
-# # # #
-# Init with source directory for netcdf files #
-# Method to generates time/file list information #
-# for a particular grid #
-##################################################
-
-import logging
-from os import listdir
-
-from netCDF4 import Dataset, netcdftime
-
-
-class SourceTime:
- def __init__(self, src_dir):
- self.src_dir = src_dir
- self.logger = logging.getLogger(__name__)
-
- # returns a list of all the relevant netcdf files
- def _get_dir_list(self, grid):
- fend = "d05%s.nc" % grid.upper()
- dir_list = listdir(self.src_dir)
- for i in range(len(dir_list)):
- if dir_list[i][-7:] != fend:
- dir_list[i] = ""
- else:
- dir_list[i] = self.src_dir + dir_list[i]
-
- dir_list.sort()
-
- return [_f for _f in dir_list if _f]
-
- # Returns list of dicts of date/time info
- # I assume there is only one date per file
- # Each date is datetime instance. to get day etc use x.day
- # They should be in order
- # Matlab var dir_list is incorporated into src_time
- def get_source_time(self, grid, t_adjust):
- dir_list = self._get_dir_list(grid)
- src_time = []
- for f in range(len(dir_list)):
- self.logger.debug("get_source_time: %s", dir_list[f])
- nc = Dataset(dir_list[f], "r")
- varid = nc.variables["time_counter"]
- f_time = {}
- f_time["fname"] = dir_list[f]
-
- # First 2 values are in unicode. Pray.
- f_time["units"] = varid.units
- f_time["calendar"] = varid.calendar
- raw_date = varid[0] + t_adjust
- f_time["date"] = netcdftime.num2date(
- raw_date, f_time["units"], f_time["calendar"]
- )
-
- src_time.append(f_time)
-
- return src_time
diff --git a/src/pybdy/nemo_bdy_zgrv2.py b/src/pybdy/nemo_bdy_zgrv2.py
index c82c0e03..b9974664 100644
--- a/src/pybdy/nemo_bdy_zgrv2.py
+++ b/src/pybdy/nemo_bdy_zgrv2.py
@@ -41,9 +41,8 @@ def get_bdy_depths_old(bdy_t, bdy_u, bdy_v, DstCoord, settings):
Written by John Kazimierz Farey, Sep 2012
Port of Matlab code of James Harle
- # Generates depth points for t, u and v in one loop iteration
-
- Initialise with bdy t, u and v grid attributes (Grid.bdy_i) and settings dictionary
+ Generates depth points for t, u and v in one loop iteration.
+ Initialise with bdy t, u and v grid attributes (Grid.bdy_i) and settings dictionary.
"""
logger = logging.getLogger(__name__)
logger.debug("init Depth")
@@ -139,13 +138,13 @@ def get_bdy_depths(DstCoord, bdy_i, grd):
"""
Depth levels from the nearest neighbour on the source grid.
- Args:
- ----
+ Parameters
+ ----------
DstCoord (object) : Object containing destination grid info
bdy_i (np.array) : indices of the i, j bdy points [bdy, 2]
grd (str) : grid type t, u, v
- Returns:
+ Returns
-------
bdy_tz (array) : sc depths on bdy points on t levels
bdy_wz (array) : sc depths on bdy points on w levels
@@ -193,13 +192,13 @@ def get_bdy_sc_depths(SourceCoord, DstCoord, grd):
"""
Depth levels from the nearest neighbour on the source grid.
- Args:
- ----
+ Parameters
+ ----------
SourceCoord (object) : Object containing source grid info
DstCoord (object) : Object containing destination grid info
grd (str) : grid type t, u, v
- Returns:
+ Returns
-------
bdy_tz (array) : sc depths on bdy points on t levels
bdy_wz (array) : sc depths on bdy points on w levels
diff --git a/src/pybdy/nemo_coord_gen_pop.py b/src/pybdy/nemo_coord_gen_pop.py
index f63ce3b9..8c3b4b93 100644
--- a/src/pybdy/nemo_coord_gen_pop.py
+++ b/src/pybdy/nemo_coord_gen_pop.py
@@ -18,10 +18,22 @@
class Coord:
- _grid = ["t", "u", "v"]
+ """Class for writing boundayr coordinate data to netcdf file."""
- # Init with nc fname and dictionary of bdy inds
def __init__(self, fname, bdy_ind):
+ """
+ Create Nemo bdy indices for t, u, v points.
+
+ Parameters
+ ----------
+ fname (str) : file name of coords file for output
+ bdy_ind (numpy.array) : indices of bdy points
+
+ Returns
+ -------
+ None : object
+ """
+ self._grid = ["t", "u", "v"]
self.bdy_ind = bdy_ind
self.logger = logging.getLogger(__name__)
self.logger.debug(fname)
@@ -32,15 +44,15 @@ def __init__(self, fname, bdy_ind):
self.ncid = Dataset(fname, "w", clobber=True, format="NETCDF4")
# Define Dimensions
- self.dim_id = self._create_dims()
+ self.dim_id = self.create_dims()
# Create tidy dictionaries to hold all our pretty variables
- self.var_nb_ij_id = self._build_dict(["i", "j"], ["nb", "i4", "unitless", 0])
- self.var_nb_r_id = self._build_dict(["r"], ["nb", "i4", "unitless", 0])
- self.var_g_lamphi_id = self._build_dict(
+ self.var_nb_ij_id = self.build_dict(["i", "j"], ["nb", "i4", "unitless", 0])
+ self.var_nb_r_id = self.build_dict(["r"], ["nb", "i4", "unitless", 0])
+ self.var_g_lamphi_id = self.build_dict(
["lam", "phi"], ["g", "f4", "degrees_east", "longitude"]
)
- self.var_e_12_id = self._build_dict(
+ self.var_e_12_id = self.build_dict(
["1", "2"], ["e", "f4", "metres", "scale factor"]
)
@@ -51,15 +63,11 @@ def __init__(self, fname, bdy_ind):
# Leave Define Mode
- # # # # # # # # #
- # # Functions # #
- # # # # # # # # #
-
def closeme(self):
self.ncid.close()
- # Creates dims and returns a dictionary of them
- def _create_dims(self):
+ def create_dims(self):
+ """Create dims and returns a dictionary of them."""
ret = {"xb": {}}
ret["xb"]["t"] = self.ncid.createDimension("xbT", len(self.bdy_ind["t"].bdy_i))
ret["xb"]["u"] = self.ncid.createDimension("xbU", len(self.bdy_ind["u"].bdy_i))
@@ -68,18 +76,18 @@ def _create_dims(self):
return ret
- # Sets up a grid dictionary
- def _build_dict(self, dim, units):
+ def build_dict(self, dim, units):
+ """Set up a grid dictionary."""
ret = {}
for g in self._grid:
ret[g] = {}
for d in dim:
- ret[g][d] = self._add_vars(d, g, units)
+ ret[g][d] = self.add_vars(d, g, units)
return ret
- # creates a var w/ attributes
- def _add_vars(self, dim, grd, unt):
+ def add_vars(self, dim, grd, unt):
+ """Create a var w/ attributes."""
dat = unt[2]
lname = unt[3]
if dim == "phi":
@@ -100,6 +108,7 @@ def _add_vars(self, dim, grd, unt):
return var
def populate(self, hgr):
+ """Populate the file with indices, lat, lon, and e dimensions."""
self.set_lenvar(self.var_nb_ij_id)
self.set_lenvar(self.var_nb_r_id)
@@ -108,10 +117,13 @@ def populate(self, hgr):
self.closeme()
- # sets the len var of each array in the var dictionary fed
- # specifying hgr and unt pulls data from loaded grid data
- # Otherwise pull it from the class dict
def set_lenvar(self, vardic, hgr=None, unt=None):
+ """
+ Set the len var of each array in the var dictionary.
+
+ Specifying hgr and unt pulls data from the loaded grid data.
+ Otherwise it is pulled from the class dict.
+ """
for ind in vardic:
x = 0
data = None
diff --git a/src/pybdy/profiler.py b/src/pybdy/profiler.py
index debd4572..3dbe558f 100644
--- a/src/pybdy/profiler.py
+++ b/src/pybdy/profiler.py
@@ -24,7 +24,6 @@
@author James Harle
@author John Kazimierz Farey
@author Srikanth Nagella
-$Last commit on:$
"""
# External imports
@@ -88,6 +87,10 @@ def process_bdy(setup_filepath=0, mask_gui=False):
setup_filepath (str) : file path to find namelist.bdy
mask_gui (bool): whether use of the GUI is required
+ Returns
+ -------
+ None : bdy data is written out to NetCDF file
+
"""
# Start Logger
logger.info("Start NRCT Logging: " + time.asctime())
@@ -388,6 +391,10 @@ def write_tidal_data(setup_var, dst_coord_var, grid, tide_cons, cons):
grid (dict): Description of arg1
tide_cons (list): Description of arg1
cons (data): cosz, sinz, cosu, sinu, cosv, sinv
+
+ Returns
+ -------
+ None : tidal data is written to NetCDF file
"""
# Mapping of variable names to grid types
@@ -489,7 +496,7 @@ def _get_mask(Setup, mask_gui):
Returns:
-------
- numpy.array : a mask array of the regional domain
+ bdy_msk (numpy.array) : a mask array of the regional domain
"""
# Initialise bdy_msk array
diff --git a/src/pybdy/reader/directory.py b/src/pybdy/reader/directory.py
index 0c00636a..fd4f1ede 100644
--- a/src/pybdy/reader/directory.py
+++ b/src/pybdy/reader/directory.py
@@ -1,293 +1,297 @@
-"""
-Abstraction for the data repository.
-
-@author: Mr. Srikanth Nagella.
-"""
-import copy
-import logging
-from os import listdir
-
-import numpy as np
-from cftime import utime
-from netCDF4 import Dataset
-
-
-class Reader(object):
- """
- Reader for all the files in the directory as one single object.
-
- Examples
- --------
- >>> reader = Reader("Folder path")
- >>> reader["t"]["votemper"][:, :, :, :]
- """
-
- grid_type_list = ["t", "u", "v", "i"]
-
- def __init__(self, directory, time_adjust):
- """
- Take in directory path as input and return the required information to the bdy.
-
- Keyword Arguments:
- -----------------
- directory -- The directory in which to look for the files
- time_adjust -- amount of time to be adjusted to the time read from file.
- """
- self.directory = directory
- self.day_interval = 1
- self.hr_interval = 0
- self.grid_source_data = {}
- for grid_type in self.grid_type_list:
- self.grid_source_data[grid_type] = self._get_source_timedata(
- grid_type, time_adjust
- )
- if self.grid_type_list is not None and len(self.grid_source_data) != 0:
- self._calculate_time_interval()
-
- def _get_dir_list(self, grid):
- """
- Scan the directory for a input grid related NetCDF files (i.e., ending with the grid name.
-
- Parameters
- ----------
- grid -- grid name eg. 't','v','u','i'.
-
- Returns
- -------
- list of files
- """
- fend = "%s.nc" % grid.upper()
- dir_list = listdir(self.directory)
- for i in range(len(dir_list)):
- if dir_list[i][-4:] != fend:
- dir_list[i] = ""
- else:
- dir_list[i] = self.directory + dir_list[i]
-
- dir_list.sort()
- return [_f for _f in dir_list if _f]
-
- def _delta_time_interval(self, time1, time2):
- """Get the difference between the two times in days and hours."""
- timedif = time2 - time1
- days = timedif / (60 * 60 * 24)
- hrs = timedif % (60 * 60 * 24)
- hrs = hrs / (60 * 60)
- return days, hrs
-
- def _get_source_timedata(self, grid, t_adjust):
- """
- Get the source time data information.
-
- Notes
- -----
- Builds up sourcedata objects of a given grid.
- """
- dir_list = self._get_dir_list(grid)
- group = GridGroup()
- group.data_list = []
- group.time_counter = []
- group.date_counter = []
- for filename in dir_list:
- nc = Dataset(filename, "r")
- varid = nc.variables["time_counter"]
- for index in range(0, len(varid)):
- x = [filename, index]
- group.data_list.append(x)
- group.time_counter.append(varid[index] + t_adjust)
- group.date_counter.append(
- utime(varid.units, varid.calendar).num2date(varid[index] + t_adjust)
- )
- group.units = varid.units
- group.calendar = varid.calendar
- nc.close()
- tmp_data_list = copy.deepcopy(group.data_list)
- tmp_time_counter = copy.deepcopy(group.time_counter)
- for index in range(len(group.time_counter)):
- tmp_data_list[index] = group.data_list[index]
- tmp_time_counter[index] = group.time_counter[index]
- group.data_list = tmp_data_list
- group.time_counter = tmp_time_counter
- return group
-
- def _calculate_time_interval(self):
- """
- Calculate the time interval of the each grid.
-
- If all the grids get the same interval then it sets it to the days and hours.
- Otherwise it throws an error.
- """
- days = set()
- hrs = set()
- for grid_type in list(self.grid_source_data.keys()):
- day, hr = self._delta_time_interval(
- self.grid_source_data[grid_type].time_counter[0],
- self.grid_source_data[grid_type].time_counter[1],
- )
- days.add(day)
- hrs.add(hr)
- if len(days) != 1 or len(hrs) != 1:
- raise Exception("All the Grid time interval is not same")
- self.day_interval = list(days)[0]
- self.hr_interval = list(hrs)[0]
-
- def __getitem__(self, val):
- if val in self.grid_type_list:
- return self.grid_source_data[val]
- else:
- return None
-
-
-class GridGroup:
- def __init__(self):
- pass
-
- def __getitem__(self, val):
- return Variable(self.data_list, val)
-
- def get_meta_data(self, variable, source_dic):
- """Return a dictionary with meta data information correspoinding to the variable."""
- # source_dic = {}
- try:
- var = self.__getitem__(variable)
- attrs = var.get_attribute_values(
- ["missing_value", "scale_factor", "add_offset", "_FillValue"]
- )
- source_dic["sf"] = 1
- source_dic["os"] = 0
- if attrs["missing_value"] is not None:
- source_dic["mv"] = attrs["missing_value"]
- if attrs["scale_factor"] is not None:
- source_dic["sf"] = attrs["scale_factor"]
- if attrs["add_offset"] is not None:
- source_dic["os"] = attrs["add_offset"]
- if attrs["_FillValue"] is not None:
- source_dic["fv"] = attrs["_FillValue"]
- return source_dic
- except KeyError:
- self.logger.error("Cannot find the requested variable " + variable)
- except (IOError, RuntimeError):
- self.logger.error("Cannot open the file " + self.file_name)
- return None
-
-
-class Variable(object):
- time_counter_const = "time_counter"
-
- def __init__(self, filenames, variable):
- self.variable = variable
- self.file_names = filenames
- self.dimensions = self._get_dimensions()
- self._set_time_dimension_index()
- self.logger = logging.getLogger(__name__)
-
- def __str__(self):
- return (
- "pyBDY Data Object from files: %s and variable %s" % self.file_names,
- self.variable,
- )
-
- def __len__(self):
- """Return the length of the variable."""
- try:
- dataset = Dataset(self.file_names[0], "r")
- dvar = dataset.variables[self.variable]
- val = len(dvar)
- dataset.close()
- return val
- except KeyError:
- self.logger.error("Cannot find the requested variable " + self.variable)
- except (IOError, RuntimeError):
- self.logger.error("Cannot open the file " + self.file_names[0])
- return None
-
- def __getitem__(self, val):
- """Return the data requested."""
- try:
- if self.time_dim_index == -1:
- dataset = Dataset(self.file_names[0][0], "r")
- dvar = dataset.variables[self.variable]
- retval = dvar[val]
- dataset.close()
- return retval
- else:
- # select all the files that are required for the selected range
- # read the data and merge them
- val = list(val)
- for index in range(len(val)):
- if type(val[index]) is not slice:
- if type(val[index]) is not np.ndarray:
- val[index] = slice(val[index], val[index] + 1)
- val = tuple(val)
- start = val[self.time_dim_index].start
- stop = val[self.time_dim_index].stop
- step = val[self.time_dim_index].step
- if step is None:
- step = 1
- if start is None:
- start = 0
- if stop is None:
- stop = len(self.file_names)
- finalval = None
- for index in range(start, stop, step):
- dataset = Dataset(self.file_names[index][0], "r")
- val = list(val)
- val[self.time_dim_index] = slice(
- self.file_names[index][1], self.file_names[index][1] + 1
- )
- val = tuple(val)
- dvar = dataset.variables[self.variable]
- retval = dvar[val]
- if finalval is None:
- finalval = retval
- else:
- finalval = np.concatenate(
- (finalval, retval), axis=self.time_dim_index
- )
- dataset.close()
- return finalval
-
- except KeyError:
- self.logger.error("Cannot find the requested variable " + self.variable)
- except (IOError, RuntimeError):
- self.logger.error("Cannot open the file " + self.file_names)
- return None
-
- def get_attribute_values(self, attr_name):
- """Return the attribute value of the variable."""
- try:
- dataset = Dataset(self.file_names[0][0], "r")
- dvar = dataset.variables[self.variable]
- ret_val = {}
- for name in attr_name:
- try:
- val = dvar.getncattr(name)
- ret_val[name] = val
- except AttributeError:
- ret_val[name] = None
- dataset.close()
- return ret_val
- except KeyError:
- self.logger.error("Cannot find the requested variable " + self.variable)
- except (IOError, RuntimeError):
- self.logger.error("Cannot open the file " + self.file_names[0])
- return None
-
- def _get_dimensions(self):
- """Return the dimensions of the variables."""
- try:
- dataset = Dataset(self.file_names[0][0], "r")
- dvar = dataset.variables[self.variable]
- return dvar.dimensions
- except KeyError:
- self.logger.error("Cannot find the requested variable " + self.variable)
- except (IOError, RuntimeError):
- self.logger.error("Cannot open the file " + self.file_names[0])
- return None
-
- def _set_time_dimension_index(self):
- """Set the time dimension index."""
- self.time_dim_index = -1
- for index in range(len(self.dimensions)):
- if self.dimensions[index] == self.time_counter_const:
- self.time_dim_index = index
+"""
+Abstraction for the data repository.
+
+@author: Mr. Srikanth Nagella.
+"""
+import copy
+import logging
+from os import listdir
+
+import numpy as np
+from cftime import utime
+from netCDF4 import Dataset
+
+
+class Reader(object):
+ """
+ Reader for all the files in the directory as one single object.
+
+ Examples
+ --------
+ reader = Reader("Folder path")
+ reader["t"]["votemper"][:, :, :, :]
+ """
+
+ grid_type_list = ["t", "u", "v", "i"]
+
+ def __init__(self, directory, time_adjust):
+ """
+ Take in directory path as input and return the required information to the bdy.
+
+ Parameters
+ ----------
+ directory : The directory in which to look for the files
+ time_adjust : amount of time by which to adjust the time read from file.
+
+ Returns
+ -------
+ None : object
+ """
+ self.directory = directory
+ self.day_interval = 1
+ self.hr_interval = 0
+ self.grid_source_data = {}
+ for grid_type in self.grid_type_list:
+ self.grid_source_data[grid_type] = self.get_source_timedata(
+ grid_type, time_adjust
+ )
+ if self.grid_type_list is not None and len(self.grid_source_data) != 0:
+ self.calculate_time_interval()
+
+ def get_dir_list(self, grid):
+ """
+ Scan the directory for input grid related NetCDF files (i.e., those ending with the grid name).
+
+ Parameters
+ ----------
+ grid (str) : grid name eg. 't','v','u','i'.
+
+ Returns
+ -------
+ dir_list (list) : list of files
+ """
+ fend = "%s.nc" % grid.upper()
+ dir_list = listdir(self.directory)
+ for i in range(len(dir_list)):
+ if dir_list[i][-4:] != fend:
+ dir_list[i] = ""
+ else:
+ dir_list[i] = self.directory + dir_list[i]
+
+ dir_list.sort()
+ return [_f for _f in dir_list if _f]
+
+ def delta_time_interval(self, time1, time2):
+ """Get the difference between the two times in days and hours."""
+ timedif = time2 - time1
+ days = timedif / (60 * 60 * 24)
+ hrs = timedif % (60 * 60 * 24)
+ hrs = hrs / (60 * 60)
+ return days, hrs
+
+ def get_source_timedata(self, grid, t_adjust):
+ """
+ Get the source time data information.
+
+ Notes
+ -----
+ Builds up sourcedata objects of a given grid.
+ """
+ dir_list = self.get_dir_list(grid)
+ group = GridGroup()
+ group.data_list = []
+ group.time_counter = []
+ group.date_counter = []
+ for filename in dir_list:
+ nc = Dataset(filename, "r")
+ varid = nc.variables["time_counter"]
+ for index in range(0, len(varid)):
+ x = [filename, index]
+ group.data_list.append(x)
+ group.time_counter.append(varid[index] + t_adjust)
+ group.date_counter.append(
+ utime(varid.units, varid.calendar).num2date(varid[index] + t_adjust)
+ )
+ group.units = varid.units
+ group.calendar = varid.calendar
+ nc.close()
+ tmp_data_list = copy.deepcopy(group.data_list)
+ tmp_time_counter = copy.deepcopy(group.time_counter)
+ for index in range(len(group.time_counter)):
+ tmp_data_list[index] = group.data_list[index]
+ tmp_time_counter[index] = group.time_counter[index]
+ group.data_list = tmp_data_list
+ group.time_counter = tmp_time_counter
+ return group
+
+ def calculate_time_interval(self):
+ """
+ Calculate the time interval of each grid.
+
+ If all the grids share the same interval then it is stored as days and hours.
+ Otherwise an error is raised.
+ """
+ days = set()
+ hrs = set()
+ for grid_type in list(self.grid_source_data.keys()):
+ day, hr = self.delta_time_interval(
+ self.grid_source_data[grid_type].time_counter[0],
+ self.grid_source_data[grid_type].time_counter[1],
+ )
+ days.add(day)
+ hrs.add(hr)
+ if len(days) != 1 or len(hrs) != 1:
+ raise Exception("Grid time intervals are not all the same")
+ self.day_interval = list(days)[0]
+ self.hr_interval = list(hrs)[0]
+
+ def __getitem__(self, val):
+ if val in self.grid_type_list:
+ return self.grid_source_data[val]
+ else:
+ return None
+
+
+class GridGroup:
+ def __init__(self):
+ pass
+
+ def __getitem__(self, val):
+ return Variable(self.data_list, val)
+
+ def get_meta_data(self, variable, source_dic):
+ """Return a dictionary with meta data information correspoinding to the variable."""
+ # source_dic = {}
+ try:
+ var = self.__getitem__(variable)
+ attrs = var.get_attribute_values(
+ ["missing_value", "scale_factor", "add_offset", "_FillValue"]
+ )
+ source_dic["sf"] = 1
+ source_dic["os"] = 0
+ if attrs["missing_value"] is not None:
+ source_dic["mv"] = attrs["missing_value"]
+ if attrs["scale_factor"] is not None:
+ source_dic["sf"] = attrs["scale_factor"]
+ if attrs["add_offset"] is not None:
+ source_dic["os"] = attrs["add_offset"]
+ if attrs["_FillValue"] is not None:
+ source_dic["fv"] = attrs["_FillValue"]
+ return source_dic
+ except KeyError:
+ # GridGroup has no self.logger attribute; use the module logger
+ logging.getLogger(__name__).error("Cannot find the requested variable " + variable)
+ except (IOError, RuntimeError):
+ logging.getLogger(__name__).error("Cannot open the files for variable " + variable)
+ return None
+
+
+class Variable(object):
+ time_counter_const = "time_counter"
+
+ def __init__(self, filenames, variable):
+ self.variable = variable
+ self.file_names = filenames
+ self.dimensions = self.get_dimensions()
+ self.set_time_dimension_index()
+ self.logger = logging.getLogger(__name__)
+
+ def __str__(self):
+ # format both values into the message (the old version returned a tuple)
+ return "pyBDY Data Object from files: %s and variable %s" % (
+ self.file_names,
+ self.variable,
+ )
+
+ def __len__(self):
+ """Return the length of the variable."""
+ try:
+ dataset = Dataset(self.file_names[0], "r")
+ dvar = dataset.variables[self.variable]
+ val = len(dvar)
+ dataset.close()
+ return val
+ except KeyError:
+ self.logger.error("Cannot find the requested variable " + self.variable)
+ except (IOError, RuntimeError):
+ self.logger.error("Cannot open the file " + self.file_names[0])
+ return None
+
+ def __getitem__(self, val):
+ """Return the data requested."""
+ try:
+ if self.time_dim_index == -1:
+ dataset = Dataset(self.file_names[0][0], "r")
+ dvar = dataset.variables[self.variable]
+ retval = dvar[val]
+ dataset.close()
+ return retval
+ else:
+ # select all the files that are required for the selected range
+ # read the data and merge them
+ val = list(val)
+ for index in range(len(val)):
+ if type(val[index]) is not slice:
+ if type(val[index]) is not np.ndarray:
+ val[index] = slice(val[index], val[index] + 1)
+ val = tuple(val)
+ start = val[self.time_dim_index].start
+ stop = val[self.time_dim_index].stop
+ step = val[self.time_dim_index].step
+ if step is None:
+ step = 1
+ if start is None:
+ start = 0
+ if stop is None:
+ stop = len(self.file_names)
+ finalval = None
+ for index in range(start, stop, step):
+ dataset = Dataset(self.file_names[index][0], "r")
+ val = list(val)
+ val[self.time_dim_index] = slice(
+ self.file_names[index][1], self.file_names[index][1] + 1
+ )
+ val = tuple(val)
+ dvar = dataset.variables[self.variable]
+ retval = dvar[val]
+ if finalval is None:
+ finalval = retval
+ else:
+ finalval = np.concatenate(
+ (finalval, retval), axis=self.time_dim_index
+ )
+ dataset.close()
+ return finalval
+
+ except KeyError:
+ self.logger.error("Cannot find the requested variable " + self.variable)
+ except (IOError, RuntimeError):
+ self.logger.error("Cannot open the file " + self.file_names)
+ return None
+
+ def get_attribute_values(self, attr_name):
+ """Return the attribute value of the variable."""
+ try:
+ dataset = Dataset(self.file_names[0][0], "r")
+ dvar = dataset.variables[self.variable]
+ ret_val = {}
+ for name in attr_name:
+ try:
+ val = dvar.getncattr(name)
+ ret_val[name] = val
+ except AttributeError:
+ ret_val[name] = None
+ dataset.close()
+ return ret_val
+ except KeyError:
+ self.logger.error("Cannot find the requested variable " + self.variable)
+ except (IOError, RuntimeError):
+ self.logger.error("Cannot open the file " + self.file_names[0])
+ return None
+
+ def get_dimensions(self):
+ """Return the dimensions of the variables."""
+ try:
+ dataset = Dataset(self.file_names[0][0], "r")
+ dvar = dataset.variables[self.variable]
+ return dvar.dimensions
+ except KeyError:
+ self.logger.error("Cannot find the requested variable " + self.variable)
+ except (IOError, RuntimeError):
+ self.logger.error("Cannot open the file " + self.file_names[0])
+ return None
+
+ def set_time_dimension_index(self):
+ """Set the time dimension index."""
+ self.time_dim_index = -1
+ for index in range(len(self.dimensions)):
+ if self.dimensions[index] == self.time_counter_const:
+ self.time_dim_index = index
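
Putting the Reader, GridGroup and Variable classes together, a hypothetical session looks like this; the directory path is a placeholder, and files must end in `<GRID>.nc` (e.g. `..._T.nc`) as get_dir_list expects:

```
reader = Reader("./inputs/src_data/", 0)  # second argument is time_adjust

# slicing on the time axis transparently merges data across files
votemper = reader["t"]["votemper"][0:2, :, :, :]

print(reader.day_interval, reader.hr_interval)
```
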
diff --git a/src/pybdy/reader/ncml.py b/src/pybdy/reader/ncml.py
index d0ac9b3f..5cf163ae 100644
--- a/src/pybdy/reader/ncml.py
+++ b/src/pybdy/reader/ncml.py
@@ -55,8 +55,8 @@ class Reader(object):
Examples
--------
- >>> reader = Reader("NCML Filename")
- >>> reader["t"]["votemper"][:, :, :, :]
+ reader = Reader("NCML Filename")
+ reader["t"]["votemper"][:, :, :, :]
"""
grid_type_list = ["t", "u", "v", "i"]
diff --git a/src/pybdy/tide/nemo_bdy_tide3.py b/src/pybdy/tide/nemo_bdy_tide3.py
index 42fcf7cc..675dc13e 100644
--- a/src/pybdy/tide/nemo_bdy_tide3.py
+++ b/src/pybdy/tide/nemo_bdy_tide3.py
@@ -20,12 +20,12 @@ def nemo_bdy_tide_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp):
Parameters
----------
- setup: settings
- DstCoord: ...
- Grid_T : grid_type, bdy_r
- Grid_U, Grid_V : bdy_i , grid_type, bdy_r
- comp: dictionary of harmonics read from namelist.
- e.g. {'1':"M2" , '2':"", ...}
+ setup : settings
+ DstCoord : destination coordinate object
+ Grid_T : t grid bdy_i, grid_type, bdy_r
+ Grid_U : u grid bdy_i, grid_type, bdy_r
+ Grid_V : v grid bdy_i, grid_type, bdy_r
+ comp : dictionary of harmonics read from namelist {'1': "M2", '2': ""}
Returns
-------
diff --git a/src/pybdy/variable.info b/src/pybdy/variable.info
index 727e7e7e..65348f83 100644
--- a/src/pybdy/variable.info
+++ b/src/pybdy/variable.info
@@ -1,8 +1,3 @@
-rn_hmin = min depth of the ocean (>0) or min number of ocean level (<0)
-rn_sbot_min = minimum depth of s-bottom surface (>0) (m)
-rn_sbot_max = maximum depth of s-bottom surface (= ocean depth) (>0) (m)
-ln_s_sigma = hybrid s-sigma coordinates
-rn_hc = critical depth with s-sigma
sn_src_hgr = source grid hgr
sn_src_zgr = source grid zgr
sn_dst_hgr = destination grid hgr
@@ -15,7 +10,7 @@ sn_dst_dir = directory where the output data files need to be stored
sn_fn = prefix for output files
nn_fv = set fill value for output files
nn_src_time_adj = src time adjustment
-sn_dst_metainfo = meta data information written to the output files.
+sn_dst_metainfo = metadata information written to the history of output files.
ln_coords_file = If true : produce bdy coordinates files
cn_coords_file = name of bdy coordinates files (if ln_coords_file is set to .TRUE.)
ln_mask_file = If .true. : read mask from file
@@ -43,7 +38,6 @@ sn_tide_dir =
sn_tide_fes =
nn_wei = smoothing filter weights
rn_r0 = decorrelation distance used in gauss smoothing onto dst points. Need to make this a funct. of dlon
-sn_history = history for netcdf file
ln_nemo3p4 = else presume v3.2 or v3.3
nn_alpha = Euler rotation angle
nn_beta = Euler rotation angle