Commit
Merge pull request #52 from johbackm/dev
Added multiprocessing to gaussian_fit(...parallel=[False, 'dask', 'multiprocessing']...), added deadtime correction to process_psds(...deadtime_correction=True...), and added OneofEvery to the .dat particle-by-particle files.
Showing 9 changed files with 334 additions and 64 deletions.
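The new options from this merge can be exercised roughly as in the sketch below. This is a hedged example: the file paths are placeholders, read_hk_file is an assumed helper, and the positional arguments of process_psds (particle data, housekeeping data, configuration) are assumptions based on the existing pysp2 API rather than on this diff.

import pysp2

# Placeholder paths; read_config() is referenced in the new docstrings below.
my_ini = pysp2.io.read_config('20220101.ini')
my_sp2b = pysp2.io.read_sp2('20220101.sp2b')    # raw particle-by-particle records
my_hk = pysp2.io.read_hk_file('20220101.hk')    # housekeeping data (assumed helper)

# After this merge, parallel may be False, 'dask', or 'multiprocessing'.
my_binary = pysp2.util.gaussian_fit(my_sp2b, my_ini, parallel='multiprocessing')

# deadtime_correction=True applies the Schwarz et al. (2022) deadtime bias
# correction when building particle size distributions; the argument order
# here is an assumption.
my_psds = pysp2.util.process_psds(my_binary, my_hk, my_ini, deadtime_correction=True)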
@@ -0,0 +1,79 @@
import numpy as np
import xarray as xr


def deadtime(my_binary, my_ini):
    """
    Calculates the deadtime bias in SP2 data as published in
    Joshua P. Schwarz, J. M. Katich, S. L. Lee, D. S. Thomson & L. A. Watts
    (2022) "Invisible bias" in the single particle soot photometer due to
    trigger deadtime, Aerosol Science and Technology, 56:7, 623-635,
    DOI: 10.1080/02786826.2022.2064265

    Parameters
    ----------
    my_binary : xarray Dataset
        xarray Dataset that is the output of pysp2.util.gaussian_fit(my_sp2b, my_ini),
        i.e. a Dataset with Gaussian fits, peak heights etc.
    my_ini : dict
        The .ini file loaded as a dict: my_ini = pysp2.io.read_config(ini_file)

    Returns
    -------
    my_binary : xarray Dataset
        The input xarray Dataset with one additional variable,
        DeadtimeRelativeBias, which can be used to correct for deadtime bias
        in the SP2 at high concentrations.
    """
    # numpy array to store the relative bias in
    bias = np.zeros(my_binary.dims['event_index'])
    # scattering triggered
    scatter_triggered = np.any(np.isfinite(np.vstack((my_binary['PkHt_ch0'].values,
                                                      my_binary['PkHt_ch4'].values))), axis=0)
    # incandescence triggered
    incandescence_triggered = np.any(np.isfinite(np.vstack((my_binary['PkHt_ch1'].values,
                                                            my_binary['PkHt_ch5'].values))), axis=0)
    digitization_rate = int(my_ini['Acquisition']['Samples/Sec'])
    buffer_length = int(my_ini['Acquisition']['Scan Length'])  # data points in one buffer
    buffer_length_seconds = buffer_length / digitization_rate  # in seconds
    event_length_points = int(my_ini['Acquisition']['Points per Event'])
    pretrigger_length_points = int(my_ini['Acquisition']['Pre-Trig Points'])
    # intentionally skipped particles, notation from Eq. 2 in doi:10.1080/02786826.2022.2064265
    S_S = int(my_ini['Acquisition']['1 of every'])
    # find locations where the buffer changes (i.e. where the TimeWave value changes):
    # get the unique time stamps
    unique_buffer_time_stamps = np.unique(my_binary['TimeWave'].values)
    # find locations where those unique time stamps should be inserted
    ind = np.searchsorted(my_binary['TimeWave'].values, unique_buffer_time_stamps)
    # add the end to the list of indexes if needed
    if ind[-1] < my_binary.dims['event_index']:
        ind = np.append(ind, my_binary.dims['event_index'])
    for i in range(len(ind) - 1):
        # from index
        fr = ind[i]
        # to index
        to = ind[i + 1]
        # number of scattering particle windows saved in the buffer
        N_S = scatter_triggered[fr:to].sum()
        # number of incandescence particle windows saved in the buffer
        N_I = incandescence_triggered[fr:to].sum()
        # length of time, in seconds, of a single measurement of the digitizer
        t_b = 1. / digitization_rate
        # "PW is the total number of data points (for each channel of data) in the window"
        P_W = event_length_points
        # buffer length in seconds
        T_B = buffer_length_seconds
        # fraction of time triggered, Eq. 2 in doi:10.1080/02786826.2022.2064265
        F_T = (N_S * S_S + N_I) * P_W * t_b / T_B
        # pretrigger length in data points
        P_PT = pretrigger_length_points
        # T_P is the total number of points in each window
        T_P = event_length_points
        # relative bias as in Eq. 3 in doi:10.1080/02786826.2022.2064265
        B_rel = -(P_PT / T_P) * F_T
        # save the bias value to the whole buffer
        bias[fr:to] = B_rel
    # add the bias array to the xarray Dataset
    my_binary['DeadtimeRelativeBias'] = xr.DataArray(bias, dims=['event_index'])
    return my_binary
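A short usage sketch for the new deadtime() helper, assuming it is imported from the new module. The final comment on how the stored DeadtimeRelativeBias might be applied is an assumption, not part of this diff; the function itself only attaches the per-event relative bias.

import numpy as np
import pysp2

# Placeholder paths for configuration and raw data.
my_ini = pysp2.io.read_config('20220101.ini')
my_sp2b = pysp2.io.read_sp2('20220101.sp2b')
my_binary = pysp2.util.gaussian_fit(my_sp2b, my_ini)

# Attach the per-event relative deadtime bias (Eq. 3 of Schwarz et al., 2022).
my_binary = deadtime(my_binary, my_ini)

# B_rel is negative, so measured quantities are biased low; one plausible use
# (an assumption) is to divide a buffer-level concentration by (1 + B_rel).
b_rel = my_binary['DeadtimeRelativeBias'].values
print('Median relative deadtime bias:', float(np.median(b_rel)))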
@@ -0,0 +1,120 @@
import numpy as np
from .peak_fit import _gaus
from scipy.optimize import curve_fit


def beam_shape(my_binary, beam_position_from='peak maximum', Globals=None):
    """
    Calculates the beam shape needed to determine the laser intensity profile
    used for leading-edge-only fits. The beam position is first determined
    from the position of the peak. The beam profile is then calculated by
    aligning all the peaks to that position. The beam shape is the mean of
    all the normalized profiles around that position.

    Parameters
    ----------
    my_binary : xarray Dataset
        Dataset with Gaussian fits, as returned by pysp2.util.gaussian_fit().
    beam_position_from : str
        'peak maximum' = construct the beam profile around the maximum peak
        position. The maximum peak position is determined from the peak-height
        weighted average peak position.
        'split detector' = construct the beam profile around the split position.
        The split position is taken from the split detector. Not working yet.
    Globals : DMTGlobals structure or None
        DMTGlobals structure containing calibration coefficients and detector
        signal limits.

    Returns
    -------
    coeff : numpy array with the Gaussian fit [amplitude, peakpos, width, base]
        of the beam shape profile.
    beam_profile : numpy array with the beam profile calculated from the mean
        of all the profiles. The array has as many data points as the input
        data.
    """

    num_base_pts_2_avg = 5
    bins = my_binary['columns']

    # boolean array for accepted particles in the high-gain scattering channel
    scatter_high_gain_accepted = np.logical_and.reduce((
        my_binary['PkFWHM_ch0'].values > Globals.ScatMinWidth,
        my_binary['PkFWHM_ch0'].values < Globals.ScatMaxWidth,
        my_binary['PkHt_ch0'].values > Globals.ScatMinPeakHt1,
        my_binary['PkHt_ch0'].values < Globals.ScatMaxPeakHt1,
        my_binary['FtPos_ch0'].values < Globals.ScatMaxPeakPos,
        my_binary['FtPos_ch0'].values > Globals.ScatMinPeakPos))

    # boolean array that is True for particles that have not been triggered by
    # the high-gain incandescence channel
    no_incand_triggered = my_binary['PkHt_ch1'].values < Globals.IncanMinPeakHt1

    # find good events that only scatter light
    only_scattering_high_gain = np.logical_and.reduce((scatter_high_gain_accepted,
                                                       no_incand_triggered))

    print('High gain scattering particles for beam analysis :: ',
          np.sum(only_scattering_high_gain))

    # make an xarray Dataset of the purely scattering particles
    my_high_gain_scatterers = my_binary.sel(index=only_scattering_high_gain,
                                            event_index=only_scattering_high_gain)

    # numpy array for the normalized beam profiles
    my_high_gain_profiles = np.zeros((my_high_gain_scatterers.dims['index'],
                                      my_high_gain_scatterers.dims['columns'])) * np.nan

    # weighted mean of the beam peak position; the weight is the scattering amplitude
    high_gain_peak_pos = int(
        np.sum(np.multiply(my_high_gain_scatterers['PkPos_ch0'].values,
                           my_high_gain_scatterers['PkHt_ch0'].values)) /
        np.sum(my_high_gain_scatterers['PkHt_ch0'].values))

    # loop through all particle events
    for i in my_high_gain_scatterers['event_index']:
        data = my_high_gain_scatterers['Data_ch0'].sel(event_index=i).values
        # base level
        base = np.mean(data[0:num_base_pts_2_avg])
        # peak height
        peak_height = data.max() - base
        # peak position
        peak_pos = data.argmax()
        # normalize the profile to the range [0, 1]
        profile = (data - base) / peak_height
        # distance to the mean beam peak position
        peak_difference = high_gain_peak_pos - peak_pos
        # shift so that the peak is at the right position (accounts for
        # particles travelling at different speeds)
        if peak_difference > 0:
            my_high_gain_profiles[i, peak_difference:] = profile[:len(data) - peak_difference]
        elif peak_difference < 0:
            my_high_gain_profiles[i, :len(data) + peak_difference] = profile[-peak_difference:]
        else:
            my_high_gain_profiles[i, :] = profile

    # get the beam profile as the mean of the aligned, normalized profiles
    beam_profile = np.nanmean(my_high_gain_profiles, axis=0)
    # find values that are lower than 5% of the max value
    low_values = np.argwhere(beam_profile < 0.05)
    # fit the Gaussian curve to the beginning of the profile only; the tail
    # can deviate from zero substantially and is not of interest
    fit_to = low_values[low_values > high_gain_peak_pos].min()

    # initial guess
    p0 = np.array([beam_profile.max() - beam_profile.min(),
                   np.argmax(beam_profile), 20.,
                   np.nanmin(beam_profile)]).astype(float)
    # fit the Gaussian curve
    # coeff = [amplitude, peakpos, width, baseline]
    coeff, var_matrix = curve_fit(_gaus, bins[:fit_to], beam_profile[:fit_to],
                                  p0=p0, method='lm', maxfev=40, ftol=1e-3)

    return coeff, beam_profile
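A similar hedged usage sketch for beam_shape(), assuming it is imported from the new module. The DMTGlobals constructor call is an assumption about how the calibration limits are usually obtained in pysp2 and is not part of this diff.

import pysp2

# Placeholder paths for configuration and raw data.
my_ini = pysp2.io.read_config('20220101.ini')
my_sp2b = pysp2.io.read_sp2('20220101.sp2b')
my_binary = pysp2.util.gaussian_fit(my_sp2b, my_ini)

# Calibration coefficients and detector signal limits (assumed constructor).
cal_globals = pysp2.util.DMTGlobals()

# coeff = [amplitude, peak position, width, baseline] of the mean high-gain
# scattering profile; beam_profile is the averaged, normalized profile itself.
coeff, beam_profile = beam_shape(my_binary, beam_position_from='peak maximum',
                                 Globals=cal_globals)
print('Fitted laser beam width (data points):', coeff[2])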