Skip to content

Commit

Permalink
Fix typos (#562)
Browse files Browse the repository at this point in the history
  • Loading branch information
mwtoews authored Dec 12, 2024
1 parent e799d6f commit 0d4d87d
Show file tree
Hide file tree
Showing 42 changed files with 290 additions and 268 deletions.
2 changes: 1 addition & 1 deletion autotest/la_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -477,7 +477,7 @@ def ends_freyberg_dev():
ax.set_xlabel("new obs group")
ax.set_title("ensemble variance analysis for three Freyberg predictions",loc="left")
plt.tight_layout()
plt.savefig("precent.pdf")
plt.savefig("percent.pdf")
plt.close("all")


Expand Down
26 changes: 13 additions & 13 deletions autotest/pst_from_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,7 @@ def freyberg_test(tmp_path):
sfodf_c.columns = sfodf_c.columns.str.lower()
assert (sfrobs_p == sfodf_c.loc[sfrobs_p.index,
sfrobs_p.columns]).all().all(), (
"Mis-match between expected and processed obs values\n",
"Mismatch between expected and processed obs values\n",
sfrobs_p.head(),
sfodf_c.loc[sfrobs_p.index, sfrobs_p.columns].head())

Expand Down Expand Up @@ -338,7 +338,7 @@ def freyberg_test(tmp_path):
sfodf_c.columns = sfodf_c.columns.str.lower()
assert (sfrobs_p == sfodf_c.loc[sfrobs_p.index,
sfrobs_p.columns]).all().all(), (
"Mis-match between expected and processed obs values")
"Mismatch between expected and processed obs values")
obsnmes = pd.concat([df.obgnme for df in pf.obs_dfs]).unique()
assert all([gp in obsnmes for gp in ['qaquifer', 'qout']])
pf.post_py_cmds.append(
Expand Down Expand Up @@ -758,7 +758,7 @@ def mf6_freyberg_test(tmp_path):
# add the function call to make generic to the forward run script
pf.add_py_function(__file__, "generic_function()", is_pre_cmd=False)

# add a function that isnt going to be called directly
# add a function that isn't going to be called directly
pf.add_py_function(__file__, "another_generic_function(some_arg)",
is_pre_cmd=None)

Expand Down Expand Up @@ -2913,7 +2913,7 @@ def test_add_array_parameters(self):
os.chdir(self.dest_ws)
# first delete the model file in the template ws
model_file.unlink()
# manually apply a multipler
# manually apply a multiplier
mult = 4
mult_values = np.loadtxt(mult_file)
mult_values[:] = mult
Expand Down Expand Up @@ -2991,7 +2991,7 @@ def test_add_list_parameters(self):
os.chdir(self.dest_ws)
# first delete the model file in the template ws
model_file.unlink()
# manually apply a multipler
# manually apply a multiplier
mult = 4
mult_df = pd.read_csv(mult_file)
# no idea why '3' is the column with multipliers and 'parval1_3' isn't
Expand Down Expand Up @@ -3079,7 +3079,7 @@ def test_add_array_parameters_pps_grid(self):
# first delete the model file in the template ws
model_file = df['model_file'].values[mult2model_row]
os.remove(model_file)
# manually apply a multipler
# manually apply a multiplier
mult = 4
if par_type != "pilotpoints":
mult_values = np.loadtxt(mult_file)
Expand Down Expand Up @@ -3194,7 +3194,7 @@ def test_add_array_parameters_to_file_list(self):
for file in array_file_input:
shutil.copy(self.array_file, Path(self.dest_ws, file))

# single 2D zone array applied to each file in filesnames
# single 2D zone array applied to each file in filenames
self.pf.add_parameters(filenames=array_file_input, par_type='zone',
zone_array=self.zone_array,
par_name_base=tag, # basename for parameters that are set up
Expand All @@ -3211,7 +3211,7 @@ def test_add_array_parameters_to_file_list(self):
# first delete the model file in the template ws
for model_file in df['model_file']:
os.remove(model_file)
# manually apply a multipler
# manually apply a multiplier
mult = 4
mult_values = np.loadtxt(mult_file)
mult_values[:] = mult
Expand Down Expand Up @@ -3761,7 +3761,7 @@ def usg_freyberg_test(tmp_path):
for layer in layers:
df_lay = df.loc[df.layer==layer,:].copy()
df_lay.sort_values(by="node")
#substract off the min node number so that each layers node dict starts at zero
#subtract off the min node number so that each layers node dict starts at zero
df_lay.loc[:,"node"] = df_lay.node - df_lay.node.min()
print(df_lay)
srd = {n:xy for n,xy in zip(df_lay.node.values,df_lay.xy.values)}
Expand Down Expand Up @@ -4078,7 +4078,7 @@ def mf6_subdir_test(tmp_path):
# add the function call to make generic to the forward run script
pf.add_py_function(__file__, f"generic_function('{sd}')",is_pre_cmd=False)

# add a function that isnt going to be called directly
# add a function that isn't going to be called directly
pf.add_py_function(__file__, "another_generic_function(some_arg)",is_pre_cmd=None)

# pf.post_py_cmds.append("generic_function()")
Expand Down Expand Up @@ -5218,7 +5218,7 @@ def plot_thresh(m_d):

def test_array_fmt(tmp_path):
from pyemu.utils.pst_from import _load_array_get_fmt
# psuedo ff option
# pseudo ff option
with open(Path(tmp_path, "test.dat"), 'w') as fp:
fp.write(" 3.000 3.0000 03.000\n"
" 3.0 3.0000 03.000")
Expand All @@ -5239,14 +5239,14 @@ def test_array_fmt(tmp_path):
arr, fmt = _load_array_get_fmt(Path(tmp_path, "test.dat"))
assert fmt == ''.join([" %11.8F"] * 3)
assert arr.sum(axis=1).sum() == 18
# tru space delim option -- sep passed
# true space delim option -- sep passed
with open(Path(tmp_path, "test.dat"), 'w') as fp:
fp.write("3.000 3.00000 03.000\n"
"3.0 3.0000 03.000")
arr, fmt = _load_array_get_fmt(Path(tmp_path, "test.dat"), sep=' ')
assert fmt == "%7.5F"
assert arr.sum(axis=1).sum() == 18
# tru space delim option with sep None
# true space delim option with sep None
with open(Path(tmp_path, "test.dat"), 'w') as fp:
fp.write("3.000 3.00000 03.000\n"
"3.0 3.0000 03.000")
Expand Down
2 changes: 1 addition & 1 deletion autotest/pst_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,7 +272,7 @@ def derivative_increment_test():
import pyemu

pst = pyemu.Pst(os.path.join("pst", "inctest.pst"))
pst.calculate_pertubations()
pst.calculate_perturbations()


def pestpp_args_test(tmp_path):
Expand Down
2 changes: 1 addition & 1 deletion autotest/smoother/10par_xsec/_master_stdout.dat
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@


pestpp-swp - a parameteric sweep utility
pestpp-swp - a parametric sweep utility
for PEST(++) datasets

by the PEST++ development team
Expand Down
10 changes: 5 additions & 5 deletions autotest/utils_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -2136,7 +2136,7 @@ def maha_pdc_summary_test(tmp_path): # todo add back in? currently super slowww
import pyemu
Path(tmp_path).mkdir(exist_ok=True)
l1_critical_value = 6.4 #chi squared value at df=1,p=0.01
l2_critical_value = 9.2 #chi sqaured value at df=2,p=0.01
l2_critical_value = 9.2 #chi squared value at df=2,p=0.01
pst_file = os.path.join("la", "pest.pst")
shutil.copy(pst_file, tmp_path)
pst = pyemu.Pst(os.path.join(tmp_path, "pest.pst"))
Expand Down Expand Up @@ -2730,14 +2730,14 @@ def pypestworker_test():
num_workers=5

# looper over and start the workers - in this
# case they dont need unique dirs since they arent writing
# case they don't need unique dirs since they aren't writing
# anything
procs = []
for i in range(num_workers):
pp = mp.Process(target=ppw_worker,args=(i,case,t_d,host,port,frun))
pp.start()
procs.append(pp)
# if everyhing worked, the the workers should recieve the
# if everything worked, then the workers should receive the
# shutdown signal from the master and exit gracefully...
for pp in procs:
pp.join()
Expand Down Expand Up @@ -3221,7 +3221,7 @@ def gpr_zdt1_test():
#return

# looper over and start the workers - in this
# case they dont need unique dirs since they arent writing
# case they don't need unique dirs since they aren't writing
# anything
procs = []
# try this test with 1 worker as an edge case
Expand All @@ -3230,7 +3230,7 @@ def gpr_zdt1_test():
pp = mp.Process(target=gpr_zdt1_ppw)
pp.start()
procs.append(pp)
# if everyhing worked, the the workers should recieve the
# if everything worked, then the workers should receive the
# shutdown signal from the master and exit gracefully...
for pp in procs:
pp.join()
Expand Down
2 changes: 1 addition & 1 deletion examples/MatrixCovariance_demo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"# Convience methods of `Matrix`\n",
"# Convenience methods of `Matrix`\n",
"\n",
"several cool things are implemented in `Matrix` and accessed through `@property` decorated methods. For example, the SVD components of a `Matrix` object are simply accessed by name. The SVD routine is called on demand and the components are cast to `Matrix` objects, all opaque to the user:"
]
Expand Down
6 changes: 3 additions & 3 deletions examples/Schurexample_freyberg.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@
"## Model background\n",
"This example is based on the synthetic classroom model of Freyberg(1988). The model is a 2-dimensional MODFLOW model with 1 layer, 40 rows, and 20 columns. The model has 2 stress periods: an initial steady-state stress period used for calibration, and a 5-year transient stress period. The calibration period uses the recharge and well flux of Freyberg(1988); the last stress period use 25% less recharge and 25% more pumping to represent future conditions for a forecast period.\n",
"\n",
"The inverse problem has 761 parameters: hydraulic conductivity of each active model cell, calibration and forecast period recharge multipliers, storage and specific yield, calibration and forecast well flux for each of the six wells, and river bed conductance for each 40 cells with river-type boundary conditions. The inverse problem has 12 head obseravtions, measured at the end of the steady-state calibration period. The forecasts of interest include the sw-gw exchange flux during both stress periods (observations named ```sw_gw_0``` and ``sw_gw_1``), and the water level in well cell 6 located in at row 28 column 5 at the end of the stress periods (observations named ```or28c05_0``` and ```or28c05_1```). The forecasts are included in the Jacobian matrix as zero-weight observations. The model files, pest control file and previously-calculated jacobian matrix are in the `freyberg/` folder\n",
"The inverse problem has 761 parameters: hydraulic conductivity of each active model cell, calibration and forecast period recharge multipliers, storage and specific yield, calibration and forecast well flux for each of the six wells, and river bed conductance for each 40 cells with river-type boundary conditions. The inverse problem has 12 head observations, measured at the end of the steady-state calibration period. The forecasts of interest include the sw-gw exchange flux during both stress periods (observations named ```sw_gw_0``` and ``sw_gw_1``), and the water level in well cell 6 located in at row 28 column 5 at the end of the stress periods (observations named ```or28c05_0``` and ```or28c05_1```). The forecasts are included in the Jacobian matrix as zero-weight observations. The model files, pest control file and previously-calculated jacobian matrix are in the `freyberg/` folder\n",
"\n",
"\n",
"Freyberg, David L. \"AN EXERCISE IN GROUND‐WATER MODEL CALIBRATION AND PREDICTION.\" Groundwater 26.3 (1988): 350-360."
Expand All @@ -65,7 +65,7 @@
"outputs": [],
"source": [
"# Because this model is old -- it predates flopy's modelgrid implementation. \n",
"# And because modelgrid has been implemented without backward compatability \n",
"# And because modelgrid has been implemented without backward compatibility \n",
"# the modelgrid object is not constructed properly. \n",
"# - We will use some sneaky pyemu to get things to how they should be \n",
"import pyemu\n",
Expand Down Expand Up @@ -166,7 +166,7 @@
"source": [
"# just set the path and filename for the jco file\n",
"jco = os.path.join(\"Freyberg\",\"freyberg.jcb\") \n",
"# use the jco name with extention \"pst\" for the control file\n",
"# use the jco name with extension \"pst\" for the control file\n",
"pst = pyemu.Pst(jco.replace(\".jcb\",\".pst\")) \n",
"# get the list of forecast names from the pest++ argument\n",
"la = pyemu.Schur(jco=jco, pst=pst, verbose=\"schur_example.log\")\n",
Expand Down
4 changes: 2 additions & 2 deletions examples/Schurexample_henry.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"The inverse problem has 603 parameters: 600 hydraulic conductivity pilot points, 1 global hydraulic conductivity, 1 specified flux multiplier for history matching and 1 specified flux multiplier for forecast conditions. The inverse problem has 36 obseravtions (21 heads and 15 concentrations) measured at the end of the steady-state calibration period. The forecasts of interest of the distance from the left model edge to the 10% seawater concentration in the basal model layer and the concentration at location 10. Both of there forecasts are \"measured\" at the end of the forecast stress period. The forecasts are both in the Jacobian matrix as zero-weight observations named `pd_ten` and `C_obs10_2`.I previously calculated the jacobian matrix, which is in the `henry/` folder, along with the PEST control file.\n",
"The inverse problem has 603 parameters: 600 hydraulic conductivity pilot points, 1 global hydraulic conductivity, 1 specified flux multiplier for history matching and 1 specified flux multiplier for forecast conditions. The inverse problem has 36 observations (21 heads and 15 concentrations) measured at the end of the steady-state calibration period. The forecasts of interest of the distance from the left model edge to the 10% seawater concentration in the basal model layer and the concentration at location 10. Both of these forecasts are \"measured\" at the end of the forecast stress period. The forecasts are both in the Jacobian matrix as zero-weight observations named `pd_ten` and `C_obs10_2`.I previously calculated the jacobian matrix, which is in the `henry/` folder, along with the PEST control file.\n",
"\n"
]
},
Expand Down Expand Up @@ -222,7 +222,7 @@
"## parameter contribution to forecast uncertainty\n",
"\n",
"\n",
"Lets look at which parameters are contributing most to forecast uncertainty, which we estimate as the decrese in forecast uncertainty from \"perfect\" knowledge of one or more parameters. for demostration purposes, lets group the hydraulic conductivity parameters by row."
"Let's look at which parameters are contributing most to forecast uncertainty, which we estimate as the decrease in forecast uncertainty from \"perfect\" knowledge of one or more parameters. For demonstration purposes, let's group the hydraulic conductivity parameters by row."
]
},
{
Expand Down
Loading

0 comments on commit 0d4d87d

Please sign in to comment.