Skip to content

Commit 3de8e9b

Browse files
authored
Merge pull request #95 from ACCESS-Cloud-Based-InSAR/dev
v2.5.7
2 parents dfd0bb1 + 0f5584f commit 3de8e9b

32 files changed

+425
-364
lines changed

.github/workflows/changelog.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -13,4 +13,4 @@ on:
1313

1414
jobs:
1515
call-changelog-check-workflow:
16-
uses: ASFHyP3/actions/.github/workflows/[email protected].1
16+
uses: ASFHyP3/actions/.github/workflows/[email protected].2

.github/workflows/labeled-pr.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,4 @@ on:
1212

1313
jobs:
1414
call-labeled-pr-check-workflow:
15-
uses: ASFHyP3/actions/.github/workflows/[email protected].1
15+
uses: ASFHyP3/actions/.github/workflows/[email protected].2

.github/workflows/release.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ on:
77

88
jobs:
99
call-release-workflow:
10-
uses: ASFHyP3/actions/.github/workflows/[email protected].1
10+
uses: ASFHyP3/actions/.github/workflows/[email protected].2
1111
with:
1212
release_prefix: dem-stitcher
1313
develop_branch: dev

.github/workflows/static-analysis.yml

+3-3
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,12 @@ on: push
44

55
jobs:
66
call-flake8-workflow:
7-
uses: ASFHyP3/actions/.github/workflows/[email protected].1
7+
uses: ASFHyP3/actions/.github/workflows/[email protected].2
88
with:
99
local_package_names: dem_stitcher
1010

1111
call-secrets-analysis-workflow:
12-
uses: ASFHyP3/actions/.github/workflows/[email protected].1
12+
uses: ASFHyP3/actions/.github/workflows/[email protected].2
1313

1414
call-ruff-workflow:
15-
uses: ASFHyP3/actions/.github/workflows/[email protected].1
15+
uses: ASFHyP3/actions/.github/workflows/[email protected].2

.github/workflows/tag.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ on:
77

88
jobs:
99
call-bump-version-workflow:
10-
uses: ASFHyP3/actions/.github/workflows/[email protected].1
10+
uses: ASFHyP3/actions/.github/workflows/[email protected].2
1111
with:
1212
user: access-cloud-insar-team
1313

CHANGELOG.md

+10
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,16 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
66
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
77
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
88

9+
## [2.5.7]
10+
11+
### Fixed
12+
* Check for Earthdata credentials in netrc (adapted from Joe Kennedy/Forrest Williams) resolving issue [#83](https://github.com/ACCESS-Cloud-Based-InSAR/dem-stitcher/issues/83)
13+
* when no netrc credentials are present while requesting data for `nasadem` or `srtm_v3`, a human-readable error instructs the user to update their `~/.netrc`.
14+
* Updates some ruff linting
15+
* Ensures ruff in `environment.yml`
16+
* Ensure single quotes for consistency.
17+
18+
919
## [2.5.6]
1020
* Updated URLs for downloading geoids from agisoft.com. Fixes [#88](https://github.com/ACCESS-Cloud-Based-InSAR/dem-stitcher/issues/88).
1121

README.md

+1-2
Original file line numberDiff line numberDiff line change
@@ -67,8 +67,7 @@ The creation metadata unrelated to georeferencing (e.g. the `compress` key or va
6767
## Credentials
6868

6969
The accessing of NASADEM and SRTM require earthdata login credentials to be put into the `~/.netrc` file. If these are not present, the stitcher will
70-
fail with `BadZipFile Error` as we use `requests` to obtain zipped data and load the data using `rasterio`. An entry in the `.netrc` will look like:
71-
70+
fail with `ValueError` asking you to update the `~/.netrc`. The appropriate entry appears as:
7271
```
7372
machine urs.earthdata.nasa.gov
7473
login <username>

dem_stitcher/__init__.py

+8-8
Original file line numberDiff line numberDiff line change
@@ -11,17 +11,17 @@
1111
except PackageNotFoundError:
1212
__version__ = None
1313
warnings.warn(
14-
"package is not installed!\n"
15-
"Install in editable/develop mode via (from the top of this repo):\n"
16-
" python -m pip install -e .\n",
14+
'package is not installed!\n'
15+
'Install in editable/develop mode via (from the top of this repo):\n'
16+
' python -m pip install -e .\n',
1717
RuntimeWarning,
1818
)
1919

2020

2121
__all__ = [
22-
"get_dem_tile_paths",
23-
"get_global_dem_tile_extents",
24-
"get_overlapping_dem_tiles",
25-
"stitch_dem",
26-
"__version__",
22+
'get_dem_tile_paths',
23+
'get_global_dem_tile_extents',
24+
'get_overlapping_dem_tiles',
25+
'stitch_dem',
26+
'__version__',
2727
]

dem_stitcher/credentials.py

+23
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
import netrc
2+
from pathlib import Path
3+
4+
5+
def ensure_earthdata_credentials(
6+
host: str = 'urs.earthdata.nasa.gov',
7+
):
8+
"""
9+
Source: DockerizedTopsapp / Authors: Joseph Kennedy, Forrest Williams, and Andrew Johnston
10+
11+
Ensures Earthdata credentials are provided in ~/.netrc
12+
13+
Earthdata username and password may be provided by, in order of preference, one of:
14+
* `netrc_file`
15+
* `username` and `password`
16+
and will be written to the ~/.netrc file if it doesn't already exist.
17+
"""
18+
netrc_file = Path.home() / '.netrc'
19+
try:
20+
dot_netrc = netrc.netrc(netrc_file)
21+
_, _, _ = dot_netrc.authenticators(host)
22+
except (FileNotFoundError, netrc.NetrcParseError, TypeError):
23+
raise ValueError(f'Please provide valid Earthdata login credentials via {netrc_file}')

dem_stitcher/datasets.py

+12-12
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,11 @@
1212
from .exceptions import DEMNotSupported
1313
from .geojson_io import read_geojson_gzip
1414

15-
DATA_PATH = Path(__file__).parents[0].absolute() / "data"
15+
DATA_PATH = Path(__file__).parents[0].absolute() / 'data'
1616

1717
# Get Datasets
18-
_DATASET_PATHS = list(DATA_PATH.glob("*.geojson.zip"))
19-
DATASETS = list(map(lambda x: x.name.split(".")[0], _DATASET_PATHS))
18+
_DATASET_PATHS = list(DATA_PATH.glob('*.geojson.zip'))
19+
DATASETS = list(map(lambda x: x.name.split('.')[0], _DATASET_PATHS))
2020

2121

2222
def get_available_datasets():
@@ -45,8 +45,8 @@ def get_global_dem_tile_extents(dataset: str) -> gpd.GeoDataFrame:
4545
"""
4646
if dataset not in DATASETS:
4747
raise DEMNotSupported(f'{dataset} must be in {", ".join(DATASETS)}')
48-
df = read_geojson_gzip(DATA_PATH / f"{dataset}.geojson.zip")
49-
df["dem_name"] = dataset
48+
df = read_geojson_gzip(DATA_PATH / f'{dataset}.geojson.zip')
49+
df['dem_name'] = dataset
5050
df.crs = CRS.from_epsg(4326)
5151
return df
5252

@@ -81,9 +81,9 @@ def get_overlapping_dem_tiles(bounds: list, dem_name: str) -> gpd.GeoDataFrame:
8181
crossing = get_dateline_crossing(bounds)
8282
if crossing:
8383
warn(
84-
"Getting tiles across dateline on the opposite hemisphere; "
85-
f"The source tiles will be {- 2 * crossing} deg along the"
86-
"longitudinal axis from the extent requested",
84+
'Getting tiles across dateline on the opposite hemisphere; '
85+
f'The source tiles will be {- 2 * crossing} deg along the'
86+
'longitudinal axis from the extent requested',
8787
category=UserWarning,
8888
)
8989
df_tiles_all_translated = df_tiles_all.copy()
@@ -100,18 +100,18 @@ def get_overlapping_dem_tiles(bounds: list, dem_name: str) -> gpd.GeoDataFrame:
100100
if not df_tiles.empty:
101101
# Degenerate geometries raise warning in shapely - intersection is black box to us
102102
with warnings.catch_warnings():
103-
warnings.simplefilter("ignore", category=RuntimeWarning)
103+
warnings.simplefilter('ignore', category=RuntimeWarning)
104104
df_tiles_intersection = df_tiles.geometry.intersection(box_geo)
105-
geo_type_index = df_tiles_intersection.geometry.map(lambda geo: geo.geom_type == "Polygon")
105+
geo_type_index = df_tiles_intersection.geometry.map(lambda geo: geo.geom_type == 'Polygon')
106106
df_tiles = df_tiles[geo_type_index].copy()
107107

108108
# Merging is order dependent - ensures consistency
109-
df_tiles = df_tiles.sort_values(by="tile_id")
109+
df_tiles = df_tiles.sort_values(by='tile_id')
110110
df_tiles = df_tiles.reset_index(drop=True)
111111
return df_tiles
112112

113113

114114
def intersects_missing_glo_30_tiles(extent: list) -> bool:
115115
extent_geo = box(*extent)
116-
df_missing = get_overlapping_dem_tiles(extent, "glo_90_missing")
116+
df_missing = get_overlapping_dem_tiles(extent, 'glo_90_missing')
117117
return df_missing.intersects(extent_geo).sum() > 0

dem_stitcher/dateline.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -10,19 +10,19 @@ def check_4326_bounds(bounds: list) -> bool:
1010
xmin, ymin, xmax, ymax = bounds
1111

1212
if (xmin > xmax) or (ymin > ymax):
13-
raise Incorrect4326Bounds("Ensure xmin <= xmax and ymin <= ymax")
13+
raise Incorrect4326Bounds('Ensure xmin <= xmax and ymin <= ymax')
1414

1515
standard_4326_box = box(-180, -90, 180, 90)
1616
bounds_box = box(*bounds)
1717

1818
if not (standard_4326_box.intersects(bounds_box)):
1919
raise Incorrect4326Bounds(
20-
"Make sure bounds have intersection over standard 4326 CRS i.e. "
21-
"between longitude -180 and 180 and latitude -90 and 90."
20+
'Make sure bounds have intersection over standard 4326 CRS i.e. '
21+
'between longitude -180 and 180 and latitude -90 and 90.'
2222
)
2323

2424
if (ymin < -90) or (ymax > 90):
25-
raise Incorrect4326Bounds("Boxes beyond the North/South Pole at +/- 90 Latitude not supported")
25+
raise Incorrect4326Bounds('Boxes beyond the North/South Pole at +/- 90 Latitude not supported')
2626

2727
return True
2828

@@ -65,7 +65,7 @@ def get_dateline_crossing(bounds: list) -> int:
6565
return 180
6666

6767
elif (xmin <= -180) and (xmax >= 180):
68-
raise DoubleDatelineCrossing("Shrink your bounding area")
68+
raise DoubleDatelineCrossing('Shrink your bounding area')
6969

7070

7171
def split_extent_across_dateline(extent: list) -> tuple[list]:
@@ -99,7 +99,7 @@ def split_extent_across_dateline(extent: list) -> tuple[list]:
9999
multipolygon = extent_box.union(extent_box_t)
100100

101101
with warnings.catch_warnings():
102-
warnings.simplefilter("ignore", category=RuntimeWarning)
102+
warnings.simplefilter('ignore', category=RuntimeWarning)
103103
bounds_l = list(multipolygon.intersection(left_hemisphere).bounds)
104104
bounds_r = list(multipolygon.intersection(right_hemisphere).bounds)
105105
return (bounds_l, bounds_r)

dem_stitcher/dem_readers.py

+12-12
Original file line numberDiff line numberDiff line change
@@ -15,9 +15,9 @@ def read_dem(dem_path: str) -> rasterio.DatasetReader:
1515
return dem_arr, dem_profile
1616

1717

18-
def read_dem_bytes(dem_path: str, suffix: str = ".img") -> bytes:
18+
def read_dem_bytes(dem_path: str, suffix: str = '.img') -> bytes:
1919
# online
20-
if (dem_path[:7] == "http://") or (dem_path[:8] == "https://"):
20+
if (dem_path[:7] == 'http://') or (dem_path[:8] == 'https://'):
2121
resp = requests.get(dem_path)
2222
data = io.BytesIO(resp.content)
2323
# local file
@@ -35,17 +35,17 @@ def read_dem_bytes(dem_path: str, suffix: str = ".img") -> bytes:
3535
return img_bytes
3636

3737

38-
def read_srtm(dem_path: str, version="srtm") -> Tuple[np.ndarray, dict]:
39-
img_bytes = read_dem_bytes(dem_path, suffix=".hgt")
38+
def read_srtm(dem_path: str, version='srtm') -> Tuple[np.ndarray, dict]:
39+
img_bytes = read_dem_bytes(dem_path, suffix='.hgt')
4040
# The gdal driver hgt depends on filename convention
41-
filename = dem_path.split("/")[-1]
42-
if version == "srtm":
43-
filename = filename.replace(".zip", "")
44-
elif version == "nasadem":
45-
filename = filename.replace(".zip", ".hgt")
46-
filename = filename.replace("NASADEM_HGT_", "")
41+
filename = dem_path.split('/')[-1]
42+
if version == 'srtm':
43+
filename = filename.replace('.zip', '')
44+
elif version == 'nasadem':
45+
filename = filename.replace('.zip', '.hgt')
46+
filename = filename.replace('NASADEM_HGT_', '')
4747
else:
48-
raise ValueError("version must be either nasadem or srtm")
48+
raise ValueError('version must be either nasadem or srtm')
4949

5050
with MemoryFile(img_bytes, filename=filename) as memfile:
5151
with memfile.open() as dataset:
@@ -56,4 +56,4 @@ def read_srtm(dem_path: str, version="srtm") -> Tuple[np.ndarray, dict]:
5656

5757

5858
def read_nasadem(dem_path: str) -> Tuple[np.ndarray, dict]:
59-
return read_srtm(dem_path, version="nasadem")
59+
return read_srtm(dem_path, version='nasadem')

dem_stitcher/geoid.py

+18-18
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,11 @@
1111
from .rio_tools import reproject_arr_to_match_profile, translate_profile
1212
from .rio_window import read_raster_from_window
1313

14-
AGISOFT_URL = "https://s3-eu-west-1.amazonaws.com/download.agisoft.com/geoids"
14+
AGISOFT_URL = 'https://s3-eu-west-1.amazonaws.com/download.agisoft.com/geoids'
1515
GEOID_PATHS_AGI = {
16-
"geoid_18": f"{DATA_PATH}/geoid_18.tif",
17-
"egm_08": f"{AGISOFT_URL}/egm2008-1.tif",
18-
"egm_96": f"{AGISOFT_URL}/egm96-15.tif",
16+
'geoid_18': f'{DATA_PATH}/geoid_18.tif',
17+
'egm_08': f'{AGISOFT_URL}/egm2008-1.tif',
18+
'egm_96': f'{AGISOFT_URL}/egm96-15.tif',
1919
}
2020

2121

@@ -46,7 +46,7 @@ def read_geoid(geoid_name: str, extent: list = None, res_buffer: int = 1) -> tup
4646
geoid_arr_r, geoid_profile_r = read_raster_from_window(
4747
geoid_path, extent_r, extent_crs, res_buffer=res_buffer
4848
)
49-
res_x = geoid_profile_l["transform"].a
49+
res_x = geoid_profile_l['transform'].a
5050
if crossing == 180:
5151
geoid_profile_l = translate_profile(geoid_profile_l, 360 / res_x, 0)
5252
else:
@@ -55,42 +55,42 @@ def read_geoid(geoid_name: str, extent: list = None, res_buffer: int = 1) -> tup
5555
[geoid_arr_l, geoid_arr_r], [geoid_profile_l, geoid_profile_r]
5656
)
5757
# Transform nodata to nan
58-
geoid_arr = geoid_arr.astype("float32")
59-
geoid_arr[geoid_profile["nodata"] == geoid_arr] = np.nan
60-
geoid_profile["nodata"] = np.nan
58+
geoid_arr = geoid_arr.astype('float32')
59+
geoid_arr[geoid_profile['nodata'] == geoid_arr] = np.nan
60+
geoid_profile['nodata'] = np.nan
6161

6262
return geoid_arr, geoid_profile
6363

6464

6565
def remove_geoid(
66-
dem_arr: np.ndarray, dem_profile: dict, geoid_name: str, dem_area_or_point: str = "Area", res_buffer: int = 2
66+
dem_arr: np.ndarray, dem_profile: dict, geoid_name: str, dem_area_or_point: str = 'Area', res_buffer: int = 2
6767
) -> np.ndarray:
68-
assert dem_area_or_point in ["Point", "Area"]
68+
assert dem_area_or_point in ['Point', 'Area']
6969

70-
extent = array_bounds(dem_profile["height"], dem_profile["width"], dem_profile["transform"])
70+
extent = array_bounds(dem_profile['height'], dem_profile['width'], dem_profile['transform'])
7171

7272
geoid_arr, geoid_profile = read_geoid(geoid_name, extent=list(extent), res_buffer=res_buffer)
7373

74-
t_dem = dem_profile["transform"]
75-
t_geoid = geoid_profile["transform"]
74+
t_dem = dem_profile['transform']
75+
t_geoid = geoid_profile['transform']
7676
res_dem = max(t_dem.a, abs(t_dem.e))
7777
res_geoid = max(t_geoid.a, abs(t_geoid.e))
7878

7979
if res_geoid * res_buffer <= res_dem:
8080
buffer_recommendation = int(np.ceil(res_dem / res_geoid))
8181
warning = (
82-
"The dem resolution is larger than the geoid resolution and its buffer; "
83-
"Edges resampled with bilinear interpolation will be inconsistent so select larger buffer."
84-
f"Select a `res_buffer = {buffer_recommendation}`"
82+
'The dem resolution is larger than the geoid resolution and its buffer; '
83+
'Edges resampled with bilinear interpolation will be inconsistent so select larger buffer.'
84+
f'Select a `res_buffer = {buffer_recommendation}`'
8585
)
8686
warnings.warn(warning, category=UserWarning)
8787

8888
# Translate geoid if necessary as all geoids have Area tag
89-
if dem_area_or_point == "Point":
89+
if dem_area_or_point == 'Point':
9090
shift = -0.5
9191
geoid_profile = translate_profile(geoid_profile, shift, shift)
9292

93-
geoid_offset, _ = reproject_arr_to_match_profile(geoid_arr, geoid_profile, dem_profile, resampling="bilinear")
93+
geoid_offset, _ = reproject_arr_to_match_profile(geoid_arr, geoid_profile, dem_profile, resampling='bilinear')
9494

9595
dem_arr_offset = dem_arr + geoid_offset
9696
return dem_arr_offset

dem_stitcher/geojson_io.py

+9-9
Original file line numberDiff line numberDiff line change
@@ -9,26 +9,26 @@
99

1010

1111
def read_geojson_gzip(input_zip_path: Union[str, Path]) -> gpd.GeoDataFrame:
12-
with gzip.GzipFile(input_zip_path, "r") as file_in:
13-
data_gjson = json.loads(file_in.read().decode("utf-8"))
14-
return gpd.GeoDataFrame.from_features(data_gjson["features"], crs=CRS.from_epsg(4326))
12+
with gzip.GzipFile(input_zip_path, 'r') as file_in:
13+
data_gjson = json.loads(file_in.read().decode('utf-8'))
14+
return gpd.GeoDataFrame.from_features(data_gjson['features'], crs=CRS.from_epsg(4326))
1515

1616

1717
def to_geojson_obj(geodataframe: gpd.geodataframe.GeoDataFrame) -> dict:
18-
features = geodataframe.to_dict("records")
18+
features = geodataframe.to_dict('records')
1919

2020
def mapping_geojson(entry):
21-
geometry = entry.pop("geometry")
22-
new_entry = {"type": "Feature", "properties": entry, "geometry": shapely.geometry.mapping(geometry)}
21+
geometry = entry.pop('geometry')
22+
new_entry = {'type': 'Feature', 'properties': entry, 'geometry': shapely.geometry.mapping(geometry)}
2323
return new_entry
2424

2525
features = list(map(mapping_geojson, features))
26-
geojson = {"type": "FeatureCollection", "features": features}
26+
geojson = {'type': 'FeatureCollection', 'features': features}
2727
return geojson
2828

2929

3030
def to_geojson_gzip(geodataframe: gpd.geodataframe.GeoDataFrame, dest_path: str) -> Path:
3131
geojson_ob = to_geojson_obj(geodataframe)
32-
with gzip.GzipFile(dest_path, "w") as file_out:
33-
file_out.write(json.dumps(geojson_ob).encode("utf-8"))
32+
with gzip.GzipFile(dest_path, 'w') as file_out:
33+
file_out.write(json.dumps(geojson_ob).encode('utf-8'))
3434
return dest_path

0 commit comments

Comments
 (0)