# The source attributes on these summed tasks are no longer meaningful:
# sources have been summed over but only one name is retained,
# so at most some of the source entries may be correct.
summed_local_task_list = list(summed_task_dict.values())
# Tasks contain attributes that are not pickle-able.
# Remove everything except uvdata_index and visibility_vector
for task in summed_local_task_list:
del task.time
del task.freq
del task.freq_i
del task.sources
del task.baseline
del task.telescope
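# Illustrative sketch (not pyuvsim code): why the tasks are stripped before being
# gathered over MPI. Only the small, picklable payload (uvdata_index and
# visibility_vector) has to survive; the toy class below is hypothetical and just
# stands in for UVTask.
import pickle

import numpy as np


class _ToyTask:
    def __init__(self):
        self.uvdata_index = (0, 0, 0)              # destination indices in the UVData arrays
        self.visibility_vector = np.zeros(4, dtype=complex)
        self.telescope = lambda: None              # lambdas, like many objects, cannot be pickled


toy = _ToyTask()
del toy.telescope                                  # strip the offending attribute, as done above
print(len(pickle.dumps(toy)))                      # the stripped task now pickles cleanly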
count.next()
if rank == 0 and not quiet:
pbar.update(count.current_value())
comm.Barrier()
count.free()
if rank == 0 and not quiet:
pbar.finish()
if rank == 0 and not quiet:
print("Calculations Complete.", flush=True)
# If profiling is active, save meta data:
from .profiling import prof # noqa
if hasattr(prof, 'meta_file'): # pragma: nocover
# Saving axis sizes on current rank (local) and for the whole job (global).
# These lines are affected by issue 179 of line_profiler, so the nocover
# above will need to stay until this issue is resolved (see profiling.py).
task_inds = np.array(list(summed_task_dict.keys()))
bl_inds = task_inds[:, 0] % Nbls
time_inds = (task_inds[:, 0] - bl_inds) // Nbls
Ntimes_loc = np.unique(time_inds).size
Nbls_loc = np.unique(bl_inds).size
Nfreqs_loc = np.unique(task_inds[:, 2]).size
axes_dict = {
'Ntimes_loc': Ntimes_loc,
'Nbls_loc': Nbls_loc,
'Nfreqs_loc': Nfreqs_loc,
'Nsrcs_loc': Nsky_parts,
'prof_rank': prof.rank
}
with open(prof.meta_file, 'w') as afile:
    for k, v in axes_dict.items():
        afile.write("{} \t {:d}\n".format(k, int(v)))
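# Illustrative sketch (not pyuvsim code): the profiling meta file written above is a
# plain tab-separated "key <TAB> value" text file, one axis size per line. Reading it
# back needs only the standard library; the path below is a placeholder.
def read_profiling_meta(path):
    """Parse a meta file written in the tab-separated format used above."""
    meta = {}
    with open(path) as afile:
        for line in afile:
            key, value = line.split("\t")
            meta[key.strip()] = int(value)
    return meta

# e.g. read_profiling_meta("profiling_meta.out") -> {'Ntimes_loc': 2, 'Nbls_loc': 6, ...}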
beam_list: :class:`~pyuvsim.BeamList`
BeamList carrying the beam models (in object mode).
beam_dict: dict
Map of antenna numbers to index in beam_list.
Yields
------
Iterable of UVTask objects.
"""
# The task_ids refer to tasks on the flattened meshgrid.
if not isinstance(input_uv, UVData):
raise TypeError("input_uv must be UVData object.")
# The sky model is passed in as a SkyModelData catalog object.
if not isinstance(catalog, SkyModelData):
raise TypeError("catalog must be a SkyModelData object.")
# Splitting the catalog for memory's sake.
Nsrcs_total = catalog.Ncomponents
if Nsky_parts > 1:
Nsky_parts = int(Nsky_parts)
src_iter = [simutils.iter_array_split(s, Nsrcs_total, Nsky_parts)[0]
for s in range(Nsky_parts)]
else:
src_iter = [range(Nsrcs_total)]
# Build the antenna list.
antenna_names = input_uv.antenna_names
antennas = []
antpos_enu, antnums = input_uv.get_ENU_antpos()
for num, antname in enumerate(antenna_names):
if beam_dict is None:
beam_id = 0
else:
beam_id = beam_dict[antname]
antennas.append(Antenna(antname, num, antpos_enu[num], beam_id))
baselines = {}
Ntimes = input_uv.Ntimes
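# Illustrative sketch (not pyuvsim code): the profiling block earlier unpacks the
# baseline-time part of a flattened task index as bl = idx % Nbls and
# time = idx // Nbls, i.e. baseline is the fastest-varying axis. The values below
# are made up and only verify that the round trip is exact.
import numpy as np

Nbls_demo = 6
blt_index = np.arange(4 * Nbls_demo)                  # four times x six baselines, flattened
bl_demo = blt_index % Nbls_demo
time_demo = (blt_index - bl_demo) // Nbls_demo
assert np.array_equal(blt_index, time_demo * Nbls_demo + bl_demo)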
(1) Npus < Nbltf -- Split by Nbltf, split sources in the task loop for memory's sake.
(2) Nbltf < Npus and Nsrcs > Npus -- Split by Nsrcs only
(3) (Nsrcs, Nbltf) < Npus -- Split by Nbltf
- Split by instrument axes here.
- Within the task loop, decide on source chunks and make skymodels on the fly.
"""
Nbltf = Nbls * Ntimes * Nfreqs
split_srcs = False
if (Nbltf < Npus) and (Npus < Nsrcs):
split_srcs = True
if split_srcs:
src_inds, Nsrcs_local = simutils.iter_array_split(rank, Nsrcs, Npus)
task_inds = range(Nbltf)
Ntasks_local = Nbltf
else:
task_inds, Ntasks_local = simutils.iter_array_split(rank, Nbltf, Npus)
src_inds = range(Nsrcs)
Nsrcs_local = Nsrcs
return task_inds, src_inds, Ntasks_local, Nsrcs_local
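# Illustrative sketch (not pyuvsim code): simutils.iter_array_split is used above as
# "indices, count = iter_array_split(rank, N, Npus)". The stand-in below shows one
# plausible even split with that return shape; pyuvsim's real helper may differ in
# detail.
def _even_split_demo(rank, N, Npus):
    """Return (range_of_indices, count) for this rank's share of N items."""
    base, extra = divmod(N, Npus)
    start = rank * base + min(rank, extra)     # earlier ranks absorb the remainder
    stop = start + base + (1 if rank < extra else 0)
    return range(start, stop), stop - start

# Splitting 10 tasks over 4 ranks gives shares of sizes 3, 3, 2, 2.
print([_even_split_demo(r, 10, 4)[1] for r in range(4)])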
def _str_to_obj(self, beam_model, use_shared_mem=False):
# Convert beam strings to objects.
if isinstance(beam_model, (AnalyticBeam, UVBeam)):
return beam_model
if beam_model.startswith('analytic'):
bspl = beam_model.split('_')
model = bspl[1]
to_set = {}
for extra in bspl[2:]:
par, val = extra.split('=')
full = self._float_params[par]
to_set[full] = float(val)
return AnalyticBeam(model, **to_set)
path = beam_model # beam_model = path to beamfits
uvb = UVBeam()
if use_shared_mem and (mpi.world_comm is not None):
if mpi.rank == 0:
uvb.read_beamfits(path)
uvb.peak_normalize()
for key, attr in uvb.__dict__.items():
if not isinstance(attr, parameter.UVParameter):
continue
if key == '_data_array':
uvb.__dict__[key].value = mpi.shared_mem_bcast(attr.value, root=0)
else:
uvb.__dict__[key].value = mpi.world_comm.bcast(attr.value, root=0)
mpi.world_comm.Barrier()
else:
uvb.read_beamfits(path)
for key, val in self.uvb_params.items():
setattr(uvb, key, val)
uvb.extra_keywords['beam_path'] = path
return uvb
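# Illustrative sketch (not pyuvsim code): how an "analytic_..." beam string is taken
# apart by the parsing loop above. The short-name mapping here is an example; the
# real one is the BeamList's _float_params attribute.
_float_params_demo = {'sigma': 'sigma', 'diam': 'diameter'}

def _parse_analytic_demo(beam_str):
    """Return (model, kwargs) for strings like 'analytic_gaussian_sigma=0.05'."""
    bspl = beam_str.split('_')
    model = bspl[1]
    kwargs = {_float_params_demo[par]: float(val)
              for par, val in (extra.split('=') for extra in bspl[2:])}
    return model, kwargs

# _parse_analytic_demo('analytic_gaussian_sigma=0.05') -> ('gaussian', {'sigma': 0.05})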
def __init__(self, sources, time, freq, baseline, telescope, freq_i=0):
self.time = time
self.freq = freq
self.sources = sources # SkyModel object
self.baseline = baseline
self.telescope = telescope
self.freq_i = freq_i
self.visibility_vector = None
self.uvdata_index = None # Where to add the visibility in the uvdata object.
if isinstance(self.time, float):
self.time = Time(self.time, format='jd')
if isinstance(self.freq, float):
self.freq = self.freq * units.Hz
if sources.spectral_type == 'flat':
self.freq_i = 0
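# Illustrative sketch (not pyuvsim code): the coercion applied in __init__ above,
# shown on its own. Bare floats are promoted to an astropy Time (Julian Date) and a
# Quantity in Hz so that downstream code always sees unit-aware objects. The values
# are arbitrary examples.
from astropy import units
from astropy.time import Time

time_val = 2457458.1738949567          # a Julian Date passed as a plain float
freq_val = 100e6                       # 100 MHz passed as a plain float
time_val = Time(time_val, format='jd') if isinstance(time_val, float) else time_val
freq_val = freq_val * units.Hz if isinstance(freq_val, float) else freq_val
print(time_val.isot, freq_val.to(units.MHz))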
save: bool
Save mock catalog as npz file.
rseed: int
If using the random configuration, pass in a RandomState seed.
return_data: bool
If True, return a SkyModelData object instead of SkyModel.
Returns
-------
:class:`pyradiosky.SkyModel` or :class:`SkyModelData`
The catalog, as either a SkyModel or a SkyModelData (if `return_data` is True).
dict
A dictionary of keywords used to define the catalog.
"""
if not isinstance(time, Time):
time = Time(time, scale='utc', format='jd')
if array_location is None:
array_location = EarthLocation(lat='-30d43m17.5s', lon='21d25m41.9s',
height=1073.)
if arrangement not in ['off-zenith', 'zenith', 'cross', 'triangle', 'long-line', 'hera_text',
'random']:
raise KeyError("Invalid mock catalog arrangement: " + str(arrangement))
mock_keywords = {
'time': time.jd, 'arrangement': arrangement,
'array_location': repr(
(array_location.lat.deg, array_location.lon.deg, array_location.height.value))
}
if arrangement == 'off-zenith':
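# Usage sketch (assumptions flagged, not a verbatim pyuvsim example): calling
# create_mock_catalog with the keywords documented above. The time, arrangement, and
# use of return_data follow the docstring; check pyuvsim.simsetup for the exact
# signature before relying on it.
from astropy.time import Time
from pyuvsim.simsetup import create_mock_catalog

mock_sky, mock_kwds = create_mock_catalog(
    Time(2457458.65410, scale='utc', format='jd'),
    arrangement='zenith',
    return_data=True,          # return a SkyModelData instead of a SkyModel
)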