Implement loading data from MongoDB.
This commit is contained in:
parent
fbe70b4fca
commit
f1e547b1be
@ -2,12 +2,16 @@ from collections import OrderedDict
|
|||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pymongo
|
import pymongo
|
||||||
|
from pymongo import MongoClient
|
||||||
import xarray_mongodb
|
import xarray_mongodb
|
||||||
import bson
|
import bson
|
||||||
import builtins
|
import builtins
|
||||||
|
|
||||||
import xarray as xr
|
import xarray as xr
|
||||||
|
# import sys
|
||||||
|
# #sys.path.insert(0, 'C:/Users/Fabrizio Klassen/PycharmProjects/DyLabDataViewer/src/bin/Analyser/AnalyserScript')
|
||||||
|
# import sys
|
||||||
|
# sys.path.append('../')
|
||||||
from ToolFunction.ToolFunction import get_date
|
from ToolFunction.ToolFunction import get_date
|
||||||
|
|
||||||
|
|
||||||
@ -292,7 +296,7 @@ class MongoDB:
|
|||||||
|
|
||||||
filter.update(global_parameters)
|
filter.update(global_parameters)
|
||||||
|
|
||||||
mongoID, _ = self.xdb.put(dataSet)
|
mongoID, _ = self.xdb.put(stackedDataSet_single)
|
||||||
|
|
||||||
data_label = {
|
data_label = {
|
||||||
name:
|
name:
|
||||||
@ -427,7 +431,6 @@ class MongoDB:
|
|||||||
|
|
||||||
docs = self.mongoDB['global'].find(filter).sort('runNum')
|
docs = self.mongoDB['global'].find(filter).sort('runNum')
|
||||||
docs = [doc['global_parameters'] for doc in docs]
|
docs = [doc['global_parameters'] for doc in docs]
|
||||||
|
|
||||||
for doc in docs:
|
for doc in docs:
|
||||||
|
|
||||||
global_parameters = doc
|
global_parameters = doc
|
||||||
@ -553,6 +556,112 @@ class MongoDB:
|
|||||||
|
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def load_data(self, shotNum, data_key=None, globalDict=None, date=None):
    """Load the observables recorded for a given shot.

    The documents of a given shot can carry a variety of data types
    (e.g. optical density, N_count, centerx).  To avoid loading all the
    data and taking too much RAM, only the requested observables are
    fetched; when *data_key* is None, every observable present in the
    first matching document is offered/loaded.

    :param shotNum: The shot number.
    :type shotNum: str
    :param data_key: Names of the observables to load; when None, all
        non-bookkeeping fields of the first run document are loaded.
    :type data_key: list, optional
    :param globalDict: Pre-fetched global parameters including scan axes;
        fetched via ``read_global_all`` when None.
    :type globalDict: dict, optional
    :param date: The date of the data ('YYYY/MM/DD'), defaults to None.
    :type date: str, optional
    :return: Date, global parameters and all requested observables.
    :rtype: dict
    """
    # set date
    if date is not None:
        self.set_date(date)

    # collect global parameters and scan axes
    if globalDict is None:
        globalDict = self.read_global_all(shotNum=shotNum, date=date)

    # initialize output dictionary
    res = {'year': self.year, 'month': self.month, 'day': self.day, 'global_parameters': {}}

    # add all global parameters except scan axes
    res['global_parameters'].update(
        {
            key: value
            for key, value in globalDict.items()
            if key not in ('scanAxis', 'scanAxisLength')
        }
    )

    # find the run documents of the shot, in run order
    # NOTE: renamed from `filter` to avoid shadowing the builtin
    query = {
        'year': self.year,
        'month': self.month,
        'day': self.day,
        'shotNum': shotNum,
    }
    docs = self.mongoDB['global'].find(query).sort('runNum')

    # when no observables were requested, take every data field of the
    # first document; cursor indexing (docs[0]) is deprecated in pymongo
    # and raises IndexError on an empty result, so iterate instead
    if data_key is None:
        bookkeeping = {'year', 'month', 'day', 'shotNum', 'runNum',
                       'global_parameters', '_id'}
        first = next(iter(docs), None)
        data_key = ([] if first is None
                    else [key for key in first if key not in bookkeeping])

    # load each requested observable separately
    for key in data_key:
        res[key] = self._load_data(shotNum=shotNum, data_key=key, globalDict=globalDict)

    # finally attach the scan-axis bookkeeping
    res['global_parameters'].update(
        {
            'scanAxis': globalDict['scanAxis'],
            'scanAxisLength': globalDict['scanAxisLength'],
        }
    )

    return res
||||||
|
def _load_data(self, shotNum, data_key, globalDict):
    """Load one observable from every run document of the given shot.

    A document may store the observable inline, or as a reference of the
    form ``{'mongoID': ..., 'engine': ...}`` to an object stored outside
    the document.  Referenced data is resolved via ``_load_data_single``;
    xarray results are expanded along any missing scan axes and finally
    combined along the scan coordinates.

    :param shotNum: The shot number.
    :type shotNum: str
    :param data_key: Name of the observable to load.
    :type data_key: str
    :param globalDict: Global parameters; must contain 'scanAxis'.
    :type globalDict: dict
    :return: Combined xarray data when possible, otherwise the list of
        per-run values.
    """
    # collect data from all docs of the given shot, in run order
    # NOTE: renamed from `filter` to avoid shadowing the builtin
    query = {
        'year': self.year,
        'month': self.month,
        'day': self.day,
        'shotNum': shotNum,
    }
    docs = self.mongoDB['global'].find(query).sort('runNum')

    data = []
    # engine of the most recently resolved reference; initialized here so
    # an empty result set no longer triggers a (silently swallowed)
    # NameError at the combine step below
    engine = None
    for doc in docs:
        value = doc[data_key]
        if isinstance(value, dict) and 'mongoID' in value:
            # value is a reference: resolve the linked object
            engine = value['engine']
            single_data = self._load_data_single(mongoID=value['mongoID'], engine=engine)
            # make every scan axis an explicit dimension so the pieces
            # can be aligned when combined below
            for axis in globalDict['scanAxis']:
                if axis not in single_data.dims:
                    single_data = single_data.expand_dims(axis)
        else:
            engine = None
            single_data = value
        data.append(single_data)

    # combine xarray pieces along their scan coordinates; best effort —
    # when the pieces cannot be combined, return the raw list instead.
    # (Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # are no longer swallowed; removed leftover debug print.)
    if engine == 'xarray':
        try:
            data = xr.combine_by_coords(data)
        except Exception:
            pass

    return data
|
2238
testLoadMongoDB.ipynb
Normal file
2238
testLoadMongoDB.ipynb
Normal file
File diff suppressed because one or more lines are too long
Loading…
Reference in New Issue
Block a user