Merge pull request #6 from DWesl/review-edits
Change eddy-covariance term to nugget effect and decoupled interpolation time to one month
DWesl committed Aug 30, 2023
2 parents 2339f14 + da3b8c7 commit 448245d
Showing 15 changed files with 994 additions and 409 deletions.
8 changes: 7 additions & 1 deletion .gitignore
@@ -1,5 +1,7 @@
# Editor backups
*~
\#*\#

# Python crashes
*.stackdump

@@ -12,9 +14,12 @@
*.csv
*.xlsx
*.nc4
*.txt
nohup.out

# C extensions, generated from autogenerated cython files
*.dll
*.so
*.c
*.cpp
flux_correlation_function*.html
@@ -34,6 +39,7 @@ build
# Packaging stuff
*.egg-info
/dist/
*.tar.gz

# Other files
\#*#
@@ -46,4 +52,4 @@ build
nohup.out

# NCO leftovers
*.nc*.pid*.*.tmp
*.nc*.pid*.*.tmp
63 changes: 44 additions & 19 deletions ameriflux_vs_casa_daily_cycles.py
@@ -11,16 +11,17 @@

import cycler
import matplotlib as mpl

# mpl.interactive(True)
# mpl.use("TkAgg")
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pytz
import seaborn as sns
import xarray

# mpl.interactive(True)
# mpl.use("TkAgg")


MINUTES_PER_HOUR = 60
SECONDS_PER_HOUR = 3600
HOURS_PER_DAY = 24
@@ -84,23 +85,47 @@ def parse_file(ameriflux_file):

if __name__ == "__main__":
ARGS = PARSER.parse_args()
TOWER_DATA = [
pd.concat(
[
parse_file(name)
for name in glob.glob(
os.path.join(ARGS.ameriflux_root, site_dir, "*_h.txt")
)
],
axis=0,
# TOWER_DATA = [
# pd.concat(
# [
# parse_file(name)
# for name in glob.glob(
# os.path.join(ARGS.ameriflux_root, site_dir, "*_h.txt")
# )
# ],
# axis=0,
# )
# for site_dir in os.listdir(ARGS.ameriflux_root)
# if os.path.isdir(os.path.join(ARGS.ameriflux_root, site_dir))
# and glob.glob(os.path.join(ARGS.ameriflux_root, site_dir, "*_h.txt"))
# ]

# TOWER_DF = pd.concat(TOWER_DATA, axis=1)
# HOURLY_DATA = TOWER_DF.resample("1H").mean()

HALF_HOUR_TOWERS = (
xarray.open_dataset(
os.path.join(
ARGS.ameriflux_root,
"AmeriFlux_all_CO2_fluxes_with_single_estimate_per_tower_half_hour_data.nc4",
)
)
for site_dir in os.listdir(ARGS.ameriflux_root)
if os.path.isdir(os.path.join(ARGS.ameriflux_root, site_dir))
and glob.glob(os.path.join(ARGS.ameriflux_root, site_dir, "*_h.txt"))
]

TOWER_DF = pd.concat(TOWER_DATA, axis=1)
HOURLY_DATA = TOWER_DF.resample("1H").mean()
.resample(TIMESTAMP_START="1H")
.mean()
)
HOUR_TOWERS = xarray.open_dataset(
os.path.join(
ARGS.ameriflux_root,
"AmeriFlux_all_CO2_fluxes_with_single_estimate_per_tower_hour_data.nc4",
)
)
HOURLY_DATA = xarray.concat(
[
HALF_HOUR_TOWERS["ameriflux_carbon_dioxide_flux_estimate"],
HOUR_TOWERS["ameriflux_carbon_dioxide_flux_estimate"],
],
dim="site",
).to_series()

CASA_DATA = xarray.open_mfdataset(
[
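
The rewritten block above loads the pre-aggregated AmeriFlux netCDF files with xarray instead of re-parsing each site's *_h.txt files, averages the half-hourly towers onto an hourly grid, and stacks the two groups along the site dimension before handing a single series to the comparison code. A minimal, self-contained sketch of that pattern, using example site codes and synthetic values in place of the real AmeriFlux files:

import numpy as np
import pandas as pd
import xarray

# Stand-ins for the half-hourly and hourly tower datasets (values are random).
half_hour = xarray.DataArray(
    np.random.rand(1, 8),
    coords={
        "site": ["US-Ha1"],
        "TIMESTAMP_START": pd.date_range("2018-07-01", periods=8, freq="30min"),
    },
    dims=("site", "TIMESTAMP_START"),
    name="ameriflux_carbon_dioxide_flux_estimate",
)
hour = xarray.DataArray(
    np.random.rand(1, 4),
    coords={
        "site": ["US-PFa"],
        "TIMESTAMP_START": pd.date_range("2018-07-01", periods=4, freq="1H"),
    },
    dims=("site", "TIMESTAMP_START"),
    name="ameriflux_carbon_dioxide_flux_estimate",
)

# Average the half-hourly sites onto the hourly grid, then stack all sites
# into one (site, time) series, mirroring the HOURLY_DATA construction above.
half_hour_hourly = half_hour.resample(TIMESTAMP_START="1H").mean()
hourly_series = xarray.concat([half_hour_hourly, hour], dim="site").to_series()
print(hourly_series.head())
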
Empty file modified correlation_function_descriptive_plots.py
100644 → 100755
26 changes: 18 additions & 8 deletions correlation_function_fits.py
@@ -91,10 +91,12 @@ def get_short_name(self):
]



class PartForm(Enum):
"""Describe one part of a correlation function."""

O = "None"
D = "Decoupled"
C = "3-term cosine series"
P = "Exponential sine-squared"

@@ -103,7 +105,6 @@
PERIODIC = P
EXPSIN2 = P

D = "Decoupled"
DECOUPLED = D
G = D
GEOSTAT = G
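
A note on the aliases being shuffled here: when several names in an Enum body share a value, Python treats the later names as aliases of the first member, so DECOUPLED, G, and GEOSTAT all resolve to the same member as D. A cut-down sketch of that behaviour (not the project's full class):

from enum import Enum


class PartFormSketch(Enum):
    """Reduced illustration of the PartForm aliasing."""

    O = "None"
    D = "Decoupled"
    C = "3-term cosine series"
    P = "Exponential sine-squared"

    # Re-using a value creates an alias, not a new member.
    DECOUPLED = D
    G = D
    GEOSTAT = G


print(PartFormSketch.GEOSTAT is PartFormSketch.D)  # True: same member
print(PartFormSketch("Decoupled"))                 # lookup by value gives PartFormSketch.D
print([form.name for form in PartFormSketch])      # aliases are skipped when iterating
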
@@ -166,12 +167,21 @@ def get_expression(self, part):
elif self == PartForm.PERIODIC:
main = "exp(-(sin(PI_OVER_{time:s} * tdata) / {0:s}_width) ** 2)"
elif self == PartForm.GEOSTAT:
if part == CorrelationPart.DAILY:
# Three hours
cutoff_one = 1./8
else:
# One month
cutoff_one = 1./12
cutoff_two = 1 - cutoff_one
slope = 1/cutoff_one
main = (
"where(((tdata / DAYS_PER_{time:s}) % 1) < 0.125, "
"1 - 8 * ((tdata / DAYS_PER_{time:s}) % 1), "
"where(((tdata / DAYS_PER_{time:s}) % 1) > 0.875, "
"8 * ((tdata / DAYS_PER_{time:s}) % 1 - 0.875), 0))"
)
"where(((tdata / DAYS_PER_{{time:s}}) % 1) < {cutoff_one:f},"
" 1 - {slope:.1f} * ((tdata / DAYS_PER_{{time:s}}) % 1),"
" where(((tdata / DAYS_PER_{{time:s}}) % 1) > {cutoff_two:f},"
" {slope:.1f} * ((tdata / DAYS_PER_{{time:s}}) % 1 - {cutoff_two:f}),"
" 0))"
).format(cutoff_one=cutoff_one, cutoff_two=cutoff_two, slope=slope)

if not part.is_modulation():
# The exponential die-off is only for the main
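
The new GEOSTAT/Decoupled branch above builds a piecewise-linear correlation in lag time: it falls from 1 to 0 over the first cutoff_one fraction of each period, stays at zero, and rises back to 1 over the last cutoff_one fraction, with the cutoff set to three hours (1/8 day) for the daily component and, per this pull request, one month (1/12 year) for the annual component. A numpy sketch of the shape that templated where() expression evaluates to; the helper name and lag values are illustrative only:

import numpy as np


def decoupled_correlation(tdata, days_per_period, cutoff_one):
    """Piecewise-linear 'decoupled' correlation as a function of lag (days)."""
    cutoff_two = 1 - cutoff_one
    slope = 1 / cutoff_one
    frac = (tdata / days_per_period) % 1
    return np.where(
        frac < cutoff_one,
        1 - slope * frac,  # linear drop from 1 to 0 just after each period start
        np.where(frac > cutoff_two, slope * (frac - cutoff_two), 0.0),
    )


# Daily component: decorrelates within three hours (1/8 of a day).
print(decoupled_correlation(np.array([0.0, 1 / 16, 1 / 8, 0.5, 1.0]), 1.0, 1 / 8))
# Annual component: decorrelates within one month (1/12 of a year).
print(decoupled_correlation(np.array([0.0, 15.0, 182.0, 365.25]), 365.25, 1 / 12))
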
@@ -310,7 +320,7 @@ def get_full_expression(part_daily, part_day_mod, part_annual):
return (
"{0:s} * {1:s} + {2:s} + "
"resid_coef * exp(-tdata / (resid_timescale * DAYS_PER_FORTNIGHT)) + "
"ec_coef * exp(-tdata / ec_timescale * HOURS_PER_DAY)"
"ec_coef * where(tdata == 0, 1, 0)"
).format(
part_daily.get_expression(CorrelationPart.DAILY),
part_day_mod.get_expression(CorrelationPart.DAILY_MODULATION),
@@ -343,7 +353,7 @@ def get_full_parameter_list(part_daily, part_day_mod, part_annual):
)
for param in form.get_parameters(time)
]
result.extend(["resid_coef", "resid_timescale", "ec_coef", "ec_timescale"])
result.extend(["resid_coef", "resid_timescale", "ec_coef"])
return result


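
This is the other half of the commit message: the short eddy-covariance exponential ec_coef * exp(-tdata / ec_timescale * HOURS_PER_DAY) becomes a nugget effect, ec_coef * where(tdata == 0, 1, 0), which contributes only at exactly zero lag, so ec_timescale drops out of the parameter list. A small numpy sketch of the residual-plus-nugget tail, with made-up parameter values:

import numpy as np

DAYS_PER_FORTNIGHT = 14.0
HOURS_PER_DAY = 24.0

tdata = np.array([0.0, 0.125, 1.0, 7.0, 30.0])  # lag in days
resid_coef, resid_timescale = 0.2, 1.5  # hypothetical fitted values
ec_coef, ec_timescale = 0.1, 0.5  # hypothetical fitted values

# Old tail: a second, very short exponential absorbing eddy-covariance noise.
old_tail = resid_coef * np.exp(
    -tdata / (resid_timescale * DAYS_PER_FORTNIGHT)
) + ec_coef * np.exp(-tdata / ec_timescale * HOURS_PER_DAY)

# New tail: a nugget, nonzero only at zero lag, so there is no timescale to fit.
new_tail = resid_coef * np.exp(
    -tdata / (resid_timescale * DAYS_PER_FORTNIGHT)
) + ec_coef * np.where(tdata == 0, 1.0, 0.0)

print(old_tail)
print(new_tail)
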
2 changes: 1 addition & 1 deletion correlation_utils.py
@@ -58,7 +58,7 @@ def get_autocorrelation_stats(column):
result.loc[:, "acovf"] = acovf(
column,
missing="conservative",
unbiased=True,
adjusted=True,
fft=True,
).astype(np.float32)
result.loc[:, "acf"] = acf(
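
The keyword change above tracks statsmodels itself: the unbiased argument of statsmodels.tsa.stattools.acovf (and acf) was renamed adjusted in newer releases, with the old spelling deprecated. A quick sketch of the call as it now reads, with random data standing in for a flux column:

import numpy as np
from statsmodels.tsa.stattools import acf, acovf

column = np.random.default_rng(0).standard_normal(500)

# adjusted=True requests the small-sample (n - k) normalization that the
# old unbiased=True keyword used to select.
autocovariance = acovf(column, adjusted=True, fft=True, missing="conservative")
autocorrelation = acf(column, adjusted=True, fft=True, missing="conservative")
print(autocovariance[:3])
print(autocorrelation[:3])
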