
debug and new functions

joschka_dev
Jianshun Gao 1 year ago
parent
commit 2b2606afc4
1. Analyser/FitAnalyser.py (200 changed lines)
2. DataContainer/ReadData.py (36 changed lines)
3. ToolFunction/ToolFunction.py (37 changed lines)
4. test.ipynb (1056 changed lines)

Analyser/FitAnalyser.py (200 changed lines)

@@ -1,4 +1,5 @@
 import numpy as np
+from uncertainties import ufloat
 import lmfit
 from lmfit.models import (ConstantModel, ComplexConstantModel, LinearModel, QuadraticModel,
@@ -201,15 +202,21 @@ class TwoGaussian2dModel(Model):
         super().__init__(two_gaussian2d, **kwargs)
         self._set_paramhints_prefix()

     def _set_paramhints_prefix(self):
         self.set_param_hint('delta', value=-1, max=0)
         self.set_param_hint('A_sigmax', expr=f'{self.prefix}delta + {self.prefix}B_sigmax')

     def guess(self, data, x, y, negative=False, **kwargs):
         pars_guess = guess_from_peak2d(self.helperModel, data, x, y, negative)
-        pars = self.make_params(A_amplitude=pars_guess['amplitude'], A_centerx=pars_guess['centerx'], A_centery=pars_guess['centery'],
-                                A_sigmax=pars_guess['sigmax'], A_sigmay=pars_guess['sigmay'],
-                                B_amplitude=pars_guess['amplitude'], B_centerx=pars_guess['centerx'], B_centery=pars_guess['centery'],
-                                B_sigmax=pars_guess['sigmax'], B_sigmay=pars_guess['sigmay'])
+        pars = self.make_params(A_amplitude=pars_guess['amplitude'].value, A_centerx=pars_guess['centerx'].value, A_centery=pars_guess['centery'].value,
+                                A_sigmax=pars_guess['sigmax'].value, A_sigmay=pars_guess['sigmay'].value,
+                                B_amplitude=pars_guess['amplitude'].value, B_centerx=pars_guess['centerx'].value, B_centery=pars_guess['centery'].value,
+                                B_sigmax=pars_guess['sigmax'].value, B_sigmay=pars_guess['sigmay'].value)
-        pars.add(f'{self.prefix}delta', value=-1, max=0, vary=True)
-        pars[f'{self.prefix}A_sigmax'].set(expr=f'delta + {self.prefix}B_sigmax')
+        pars.add(f'{self.prefix}delta', value=-1, max=0, min=-np.inf, vary=True)
         pars[f'{self.prefix}A_sigmay'].set(min=0.0)
         pars[f'{self.prefix}B_sigmax'].set(min=0.0)
         pars[f'{self.prefix}B_sigmay'].set(min=0.0)
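
The constraint idiom in this hunk deserves a note: the auxiliary delta parameter is bounded above by zero and A_sigmax is defined by the expression delta + B_sigmax, so the A width can never exceed the B width during a fit. A minimal sketch of the same idiom in plain lmfit (parameter names mirror the model above, values are illustrative):

    # delta <= 0 ties A_sigmax to B_sigmax, so A_sigmax <= B_sigmax always holds
    import lmfit

    pars = lmfit.Parameters()
    pars.add('B_sigmax', value=2.0, min=0.0)
    pars.add('delta', value=-1.0, max=0.0, vary=True)
    pars.add('A_sigmax', expr='delta + B_sigmax')
    print(pars['A_sigmax'].value)  # 1.0, re-evaluated from the expression
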
@@ -264,6 +271,29 @@ class FitAnalyser():
         self.fitDim = fitDim
+
+    def print_params_set_templet(self, params=None):
+        if params is None:
+            params = self.fitModel.make_params()
+        for key in params:
+            res = "params.add("
+            res += "name=\"" + key + "\", "
+            if not params[key].expr is None:
+                res += "expr=" + params[key].expr + ")"
+            else:
+                res += "value=" + f'{params[key].value:3g}' + ", "
+                if str(params[key].max)=="inf":
+                    res += "max=np.inf, "
+                else:
+                    res += "max=" + f'{params[key].max:3g}' + ", "
+                if str(params[key].min)=="-inf":
+                    res += "min=-np.inf, "
+                else:
+                    res += "min=" + f'{params[key].min:3g}' + ", "
+                res += "vary=" + str(params[key].vary) + ")"
+            print(res)

     def _guess_1D(self, data, x, **kwargs):
         return self.fitModel.guess(data=data, x=x, **kwargs)
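
The new print_params_set_templet helper emits one ready-to-paste params.add(...) line per parameter of the wrapped model, as a starting template for setting bounds by hand. A hedged usage sketch (the FitAnalyser constructor is not shown in this diff, so the instantiation is an assumption):

    # Hypothetical usage; constructor arguments are illustrative.
    fitAnalyser = FitAnalyser(Gaussian2dModel(), fitDim=2)
    fitAnalyser.print_params_set_templet()
    # prints lines such as:
    # params.add(name="amplitude", value=1, max=np.inf, min=-np.inf, vary=True)
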
@@ -374,10 +404,12 @@ class FitAnalyser():
                     }
                 )
         if isinstance(paramsArray, type(self.fitModel.make_params())):
             if input_core_dims is None:
                 kwargs.update(
                     {
-                        "input_core_dims": [['x'], []],
+                        "input_core_dims": [['x']],
                     }
                 )
@@ -389,13 +421,11 @@ class FitAnalyser():
             if input_core_dims is None:
                 kwargs.update(
                     {
-                        "input_core_dims": [[x], []],
+                        "input_core_dims": [[x]],
                     }
                 )
             x = dataArray[x].to_numpy()

         if isinstance(paramsArray, type(self.fitModel.make_params())):
             if self.fitDim == 1:
                 return xr.apply_ufunc(self._fit_1D, dataArray, kwargs={'params':paramsArray,'x':x},
                                       output_dtypes=[type(lmfit.model.ModelResult(self.fitModel, self.fitModel.make_params()))],
@@ -409,7 +439,7 @@ class FitAnalyser():
             if input_core_dims is None:
                 kwargs.update(
                     {
-                        "input_core_dims": [['x', 'y'], []],
+                        "input_core_dims": [['x', 'y']],
                     }
                 )
             else:
@@ -419,7 +449,7 @@ class FitAnalyser():
         elif input_core_dims is None:
             kwargs.update(
                 {
-                    "input_core_dims": [['x', 'y'], []],
+                    "input_core_dims": [['x', 'y']],
                 }
             )
@@ -436,6 +466,27 @@ class FitAnalyser():
                                       **kwargs)
         else:
+            if input_core_dims is None:
+                kwargs.update(
+                    {
+                        "input_core_dims": [['x'], []],
+                    }
+                )
+            if x is None:
+                if 'x' in dataArray.dims:
+                    x = dataArray['x'].to_numpy()
+            else:
+                if isinstance(x, str):
+                    if input_core_dims is None:
+                        kwargs.update(
+                            {
+                                "input_core_dims": [[x], []],
+                            }
+                        )
+                    x = dataArray[x].to_numpy()
+
             if self.fitDim == 1:
                 return xr.apply_ufunc(self._fit_1D, dataArray, paramsArray, kwargs={'x':x},
                                       output_dtypes=[type(lmfit.model.ModelResult(self.fitModel, self.fitModel.make_params()))],
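
The dropped `[]` entries above are the actual fix: xarray's apply_ufunc takes exactly one input_core_dims entry per positional argument. When the single Parameters object travels through kwargs, only dataArray is positional, so [['x']] is correct; the else branch passes paramsArray as a second positional argument and therefore keeps the two-entry form [['x'], []]. A self-contained illustration of the rule with toy data:

    import numpy as np
    import xarray as xr

    da = xr.DataArray(np.random.rand(4, 100), dims=['runs', 'x'])

    # one positional input, one entry
    xr.apply_ufunc(lambda v: v.mean(), da,
                   input_core_dims=[['x']], vectorize=True)

    # two positional inputs, two entries; the second input has no core dims
    offsets = xr.DataArray(np.arange(4.0), dims=['runs'])
    xr.apply_ufunc(lambda v, o: v.mean() + o, da, offsets,
                   input_core_dims=[['x'], []], vectorize=True)
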
@@ -537,4 +588,131 @@ class FitAnalyser():
         return xr.apply_ufunc(self._eval_2D, fitResultArray, kwargs={"x":_x, "y":_y, "shape":(len(x), len(y))}, **kwargs)
+
+    def _get_fit_value_single(self, fitResult, key):
+        return fitResult.params[key].value
+
+    def _get_fit_value(self, fitResult, params):
+        func = np.vectorize(self._get_fit_value_single)
+        res = tuple(
+            func(fitResult, key)
+            for key in params
+        )
+        return res
+
+    def get_fit_value(self, fitResult, dask='parallelized', **kwargs):
+        firstIndex = {
+            key: fitResult[key][0]
+            for key in fitResult.dims
+        }
+        firstFitResult = fitResult.sel(firstIndex).item()
+        params = list(firstFitResult.params.keys())
+        output_core_dims = [[] for _ in range(len(params))]
+
+        kwargs.update(
+            {
+                "dask": dask,
+                "output_core_dims": output_core_dims,
+            }
+        )
+
+        value = xr.apply_ufunc(self._get_fit_value, fitResult, kwargs=dict(params=params), **kwargs)
+
+        value = xr.Dataset(
+            data_vars={
+                params[i]: value[i]
+                for i in range(len(params))
+            },
+            attrs=fitResult.attrs
+        )
+
+        return value
+
+    def _get_fit_std_single(self, fitResult, key):
+        return fitResult.params[key].stderr
+
+    def _get_fit_std(self, fitResult, params):
+        func = np.vectorize(self._get_fit_std_single)
+        res = tuple(
+            func(fitResult, key)
+            for key in params
+        )
+        return res
+
+    def get_fit_std(self, fitResult, dask='parallelized', **kwargs):
+        firstIndex = {
+            key: fitResult[key][0]
+            for key in fitResult.dims
+        }
+        firstFitResult = fitResult.sel(firstIndex).item()
+        params = list(firstFitResult.params.keys())
+        output_core_dims = [[] for _ in range(len(params))]
+
+        kwargs.update(
+            {
+                "dask": dask,
+                "output_core_dims": output_core_dims,
+            }
+        )
+
+        value = xr.apply_ufunc(self._get_fit_std, fitResult, kwargs=dict(params=params), **kwargs)
+
+        value = xr.Dataset(
+            data_vars={
+                params[i]: value[i]
+                for i in range(len(params))
+            },
+            attrs=fitResult.attrs
+        )
+
+        return value
+
+    def _get_fit_full_result_single(self, fitResult, key):
+        return ufloat(fitResult.params[key].value, fitResult.params[key].stderr)
+
+    def _get_fit_full_result(self, fitResult, params):
+        func = np.vectorize(self._get_fit_full_result_single)
+        res = tuple(
+            func(fitResult, key)
+            for key in params
+        )
+        return res
+
+    def get_fit_full_result(self, fitResult, dask='parallelized', **kwargs):
+        firstIndex = {
+            key: fitResult[key][0]
+            for key in fitResult.dims
+        }
+        firstFitResult = fitResult.sel(firstIndex).item()
+        params = list(firstFitResult.params.keys())
+        output_core_dims = [[] for _ in range(len(params))]
+
+        kwargs.update(
+            {
+                "dask": dask,
+                "output_core_dims": output_core_dims,
+            }
+        )
+
+        value = xr.apply_ufunc(self._get_fit_full_result, fitResult, kwargs=dict(params=params), **kwargs)
+
+        value = xr.Dataset(
+            data_vars={
+                params[i]: value[i]
+                for i in range(len(params))
+            },
+            attrs=fitResult.attrs
+        )
+
+        return value
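
The three new accessors turn an object-dtype array of lmfit ModelResult into plain Datasets: best-fit values, standard errors, and both combined into uncertainties ufloats. A hedged end-to-end sketch (the public fit entry point and the data names are not part of this diff and are illustrative):

    # Hypothetical workflow around the new accessors.
    fitResult = fitAnalyser.fit_data(ds.data, params).load()  # array of ModelResult
    fitValue = fitAnalyser.get_fit_value(fitResult)           # params[key].value per point
    fitStd = fitAnalyser.get_fit_std(fitResult)               # params[key].stderr per point
    fitFull = fitAnalyser.get_fit_full_result(fitResult)      # ufloat(value, stderr) per point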

DataContainer/ReadData.py (36 changed lines)

@@ -3,6 +3,8 @@ import numpy as np
 from collections import OrderedDict
 from functools import partial
 import copy
+import glob
+import os

 def _read_globals_attrs(variable_attrs, context=None):
@@ -74,6 +76,9 @@ def _read_globals_attrs(variable_attrs, context=None):
         }
     )

+    # if result['scanAxis'] == []:
+    #     result['scanAxis'] = ['runs',]

    return result
@@ -83,16 +88,18 @@ def _read_shot_number_from_hdf5(x):
     return x.assign(shotNum=shotNum)

-def _assign_scan_axis_partial(x, datesetOfGlobal):
+def _assign_scan_axis_partial(x, datesetOfGlobal, fullFilePath):
     scanAxis = datesetOfGlobal.scanAxis
-    filePath = x.encoding["source"]
-    shotNum = filePath.split("_")[-1].split("_")[-1].split(".")[0]
+    filePath = x.encoding["source"].replace("\\", "/")
+    shotNum = np.where(fullFilePath==filePath)
+    shotNum = np.squeeze(shotNum)
+    # shotNum = filePath.split("_")[-1].split("_")[-1].split(".")[0]
     x = x.assign(shotNum=shotNum)
     x = x.expand_dims(list(scanAxis))

     return x.assign_coords(
         {
-            key: np.atleast_1d(datesetOfGlobal.attrs[key][int(shotNum)])
+            key: np.atleast_1d(np.atleast_1d(datesetOfGlobal.attrs[key])[int(shotNum)])
             for key in scanAxis
         }
     )
@@ -108,6 +115,21 @@ def update_hdf5_file():
 def read_hdf5_file(filePath, group=None, datesetOfGlobal=None, preprocess=None, join="outer", parallel=True, engine="h5netcdf", phony_dims="access", **kwargs):
+    filePath = np.sort(np.atleast_1d(filePath))
+    filePathAbs = []
+    for i in range(len(filePath)):
+        filePathAbs.append(os.path.abspath(filePath[i]).replace("\\", "/"))
+    fullFilePath = []
+    for i in range(len(filePathAbs)):
+        fullFilePath.append(list(np.sort(glob.glob(filePathAbs[i]))))
+    fullFilePath = np.array(fullFilePath).flatten()
+    for i in range(len(fullFilePath)):
+        fullFilePath[i] = fullFilePath[i].replace("\\", "/")

     kwargs.update(
         {
             'join': join,
@@ -120,7 +142,7 @@ def read_hdf5_file(filePath, group=None, datesetOfGlobal=None, preprocess=None,
     if datesetOfGlobal is None:
         datesetOfGlobal = xr.open_mfdataset(
-            filePath,
+            fullFilePath,
             group="globals",
             concat_dim="fileNum",
             combine="nested",
@@ -130,14 +152,14 @@ def read_hdf5_file(filePath, group=None, datesetOfGlobal=None, preprocess=None,
             combine_attrs=_read_globals_attrs,
             parallel=True, )

-    _assgin_scan_axis = partial(_assign_scan_axis_partial, datesetOfGlobal=datesetOfGlobal)
+    _assgin_scan_axis = partial(_assign_scan_axis_partial, datesetOfGlobal=datesetOfGlobal, fullFilePath=fullFilePath)

     if preprocess is None:
         kwargs.update({'preprocess':_assgin_scan_axis})
     else:
         kwargs.update({'preprocess':preprocess})

-    ds = xr.open_mfdataset(filePath, **kwargs)
+    ds = xr.open_mfdataset(fullFilePath, **kwargs)

     newDimKey = np.append(['x', 'y', 'z'], [ chr(i) for i in range(97, 97+23)])
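
With this change read_hdf5_file expands glob patterns itself, normalizes Windows path separators, and derives each shotNum from the file's position in the sorted expansion rather than by parsing the file name. A hedged call sketch (the path is illustrative):

    # Hypothetical call; any pattern glob.glob understands works.
    ds = read_hdf5_file("./2023/04/24/*/*.h5")
    # shotNum and the scan-axis coordinates are attached per file by
    # _assign_scan_axis_partial during the open_mfdataset preprocess step.
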

ToolFunction/ToolFunction.py (37 changed lines)

@@ -1,7 +1,11 @@
-import numpy as np
+import glob
 from datetime import date
+
+import numpy as np
+from uncertainties import unumpy as unp
+import xarray as xr

 def get_mask(dataArray):
     return np.ones(dataArray.shape, dtype=bool)
@@ -41,8 +45,35 @@ def get_date():
     return today.strftime("%y/%m/%d")

-def resolve_fit_result(fitResult):
+def _combine_uncertainty(value, std):
+    return unp.uarray(value, std)
+
+def combine_uncertainty(value, std, dask='parallelized', **kwargs):
+    kwargs.update(
+        {
+            "dask": dask,
+        }
+    )
+    return xr.apply_ufunc(_combine_uncertainty, value, std, **kwargs)
+
+def _seperate_uncertainty_single(data):
+    return data.n, data.s
+
+def _seperate_uncertainty(data):
+    func = np.vectorize(_seperate_uncertainty_single)
+    return func(data)
+
+def seperate_uncertainty(data, dask='parallelized', **kwargs):
+    kwargs.update(
+        {
+            "dask": dask,
+            "output_core_dims": [[], []],
+        }
+    )
-    return
+    return xr.apply_ufunc(_seperate_uncertainty, data, **kwargs)
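
combine_uncertainty and seperate_uncertainty (note the project's spelling of the names) round-trip between separate value/std arrays and a single array of uncertainties objects. A minimal sketch with in-memory data; dask-backed arrays go through the same apply_ufunc path:

    import numpy as np
    import xarray as xr

    value = xr.DataArray(np.array([1.0, 2.0]), dims=['x'])
    std = xr.DataArray(np.array([0.1, 0.2]), dims=['x'])

    u = combine_uncertainty(value, std)   # uarray: [1.0+/-0.1, 2.0+/-0.2]
    v, s = seperate_uncertainty(u)        # two plain float DataArrays again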

test.ipynb (1056 changed lines)
File diff suppressed because one or more lines are too long
