# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def read_MAPS(working_directory,
              file_name, channel_num=1):
    """
    Load scan data from a MAPS-format HDF5 file.

    NOTE(review): this block is a truncated fragment — the function body is
    cut off shortly after the file is opened; the remainder is not visible
    in this chunk. Behavior beyond the visible lines cannot be documented.

    Parameters
    ----------
    working_directory : str
        directory containing the data file
    file_name : str
        name of the HDF5 file to open (joined with ``working_directory``)
    channel_num : int
        number of detector channels — TODO confirm how it is used; the
        visible fragment never reads it
    """
    # data_dict = OrderedDict()
    data_sets = OrderedDict()
    img_dict = OrderedDict()
    # Empty container for metadata
    mdata = ScanMetadataXRF()
    # cut off bad point on the last position of the spectrum
    # bad_point_cut = 0
    fit_val = None
    fit_v_pyxrf = None
    file_path = os.path.join(working_directory, file_name)
    print('file path is {}'.format(file_path))
    # NOTE(review): opened 'r+' (read/write) — presumably the full function
    # writes back to the file; the visible fragment only reads.
    with h5py.File(file_path, 'r+') as f:
        # MAPS files keep everything under the top-level 'MAPS' group
        data = f['MAPS']
        # base name without extension, e.g. 'scan_0001.h5' -> 'scan_0001'
        fname = file_name.split('.')[0]
        # for 2D MAP
def _extract_metadata_from_header(hdr):
    """
    Extract metadata from start and stop document. Metadata extracted from other document
    in the scan are beamline specific and added to dictionary at later time.
    """
    # 'hdr' is presumably a databroker Header; '.start' is the run start document
    start_document = hdr.start
    # Fresh metadata container to be populated from the start document
    mdata = ScanMetadataXRF()
    # Mapping: metadata key -> list of candidate locations in the start document.
    # A path with '/' presumably denotes a nested lookup (e.g.
    # 'beamline_status/energy'); an empty list means the value has no source
    # in the start document — TODO confirm against the full implementation.
    data_locations = {
        "scan_id": ["scan_id"],
        "scan_uid": ["uid"],
        "scan_instrument_id": ["beamline_id"],
        "scan_instrument_name": [],
        "scan_time_start": ["time"],
        "scan_time_start_utc": ["time"],
        "instrument_mono_incident_energy": ["beamline_status/energy"],
        "instrument_beam_current": [],
        "instrument_detectors": ["detectors"],
        "sample_name": ["sample/name", "sample"],
        "experiment_plan_name": ["plan_name"],
        # NOTE(review): fragment truncated here — the dictionary literal is
        # never closed in the visible source; the lines that follow in this
        # chunk belong to a different function.
        load fitting results
    load_roi_results : bool
        load results of roi computation

    Returns
    -------
    data_dict : dict
        with fitting data
    data_sets : dict
        data from each channel and channel summed, a dict of DataSelection objects
    """
    # NOTE(review): the 'def' line and the start of this docstring are not
    # visible in this chunk — this is the tail of a different loader function
    # (it references 'working_directory' and 'file_name' parameters declared
    # outside the visible span).
    data_sets = OrderedDict()
    img_dict = OrderedDict()
    # Empty container for metadata
    mdata = ScanMetadataXRF()
    file_path = os.path.join(working_directory, file_name)
    # defined in other_list in config file
    # Best-effort read of suitcase data: any failure falls back to an empty
    # dict rather than aborting the load.
    try:
        dict_sc = retrieve_data_from_hdf_suitcase(file_path)
    except Exception:
        dict_sc = {}
    with h5py.File(file_path, 'r+') as f:
        # Retrieve metadata if it exists
        if "xrfmap/scan_metadata" in f:  # Metadata is always loaded
            metadata = f["xrfmap/scan_metadata"]
            for key, value in metadata.attrs.items():
                # Convert ndarrays to lists (they were lists before they were saved)
                # NOTE(review): fragment truncated here — the loop body is not
                # visible in this chunk.
load_each_channel = Bool(False)
# Used while loading data from database
# True: overwrite existing data file if it exists
# False: create new file with unique name (original name + version number)
file_overwrite_existing = Bool(False)
p1_row = Int(-1)
p1_col = Int(-1)
p2_row = Int(-1)
p2_col = Int(-1)
data_ready = Bool(False)
# Scan metadata
scan_metadata = Typed(ScanMetadataXRF)
# Indicates if metadata is available for recently loaded scan
scan_metadata_available = Bool(False)
# Indicates if the incident energy is available in metadata for recently loaded scan
incident_energy_available = Bool(False)
# Changing this variable sets incident energy in the ``plot_model``
# Must be linked with the function ``plot_model.set_incident_energy``
# This value is not updated if incident energy parameter is changed somewhere else, therefore
# its value should not be used for computations!!!
incident_energy_set = Float(0.0)
def __init__(self, **kwargs):
self.working_directory = kwargs['working_directory']
self.mask_data = None
# Display PyXRF version in the window title