    '-o', '--output', metavar='OUTPUT.fits[.gz]', required=True,
    help='name of output FITS file [required]')
opts = parser.parse_args()

# Late imports.
import healpy as hp
import numpy as np
from astropy.table import Table
from lalinference.io import fits
from lalinference.bayestar import distance
from lalinference.bayestar import moc
from lalinference.bayestar.sky_map import derasterize
from lalinference.healpix_tree import adaptive_healpix_histogram

samples = Table.read(opts.input, format='ascii')
theta = 0.5 * np.pi - samples['dec']
phi = samples['ra']
if 'distance' in samples.colnames:
    samples.rename_column('distance', 'dist')

p = adaptive_healpix_histogram(
    theta, phi, opts.samples_per_bin,
    nside=opts.nside, max_nside=opts.max_nside, nest=True)


def diststats(samples, max_nside, nside, ipix):
    step = (max_nside // nside) ** 2
    i0 = np.searchsorted(samples['ipix'], step * ipix)
    i1 = np.searchsorted(samples['ipix'], step * (ipix + 1))
    if i0 == i1:
        return np.inf, 0.0
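# The snippet above is cut off mid-function. A hedged sketch of how the
# non-empty branch would plausibly finish, summarizing the in-pixel
# distance posterior (this completion is an assumption, not the original
# lalinference code):
def diststats_sketch(samples, max_nside, nside, ipix):
    step = (max_nside // nside) ** 2
    i0 = np.searchsorted(samples['ipix'], step * ipix)
    i1 = np.searchsorted(samples['ipix'], step * (ipix + 1))
    if i0 == i1:
        return np.inf, 0.0  # no posterior samples in this pixel
    dist = samples['dist'][i0:i1]
    return np.mean(dist), np.std(dist)  # assumed summary statistics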
def main(args):
    """Compute weights for galaxies in the args.meas_filter band and save
    them to the main catalogs, args.cat_name, in all bands. The half-light
    radius information is stored in the args.fits_file_name files.
    """
    parent_name = args.parent_file_name.replace('filter', args.meas_filter)
    parent_cat = Table.read(args.main_path + parent_name,
                            format='fits')
    select_name = args.fits_file_name.replace('filter', args.meas_filter)
    select_cat = Table.read(args.main_path + args.out_dir + select_name,
                            format='fits')
    # The error in the efficiency was high beyond a half-light radius of 55
    # pixels, so the fit is computed only up to args.max_hlr.
    bins = np.linspace(0, args.max_hlr, 10)
    eff, err = get_efficiency_with_error(select_cat['flux_radius'],
                                         parent_cat['FLUX_RADIUS'],
                                         bins=bins)
    hlr = 0.5 * (bins[1:] + bins[:-1])
    z = np.polyfit(hlr, eff, 3)
    p = np.poly1d(z)
    norm = p(args.max_hlr)
    eff_new = p(select_cat['flux_radius'])
    weight = 1 / eff_new * norm
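# The block above fits a cubic to the detection efficiency as a function
# of half-light radius and up-weights each galaxy by the inverse of its
# fitted efficiency, normalized at args.max_hlr. A self-contained toy
# illustration of the same np.polyfit/np.poly1d pattern (the numbers
# below are made up, not from the catalogs above):
import numpy as np

toy_hlr = np.linspace(0.5, 5.0, 10)               # bin centers (pixels)
toy_eff = 1.0 / (1.0 + np.exp(toy_hlr - 4.0))     # fake efficiency curve
fit = np.poly1d(np.polyfit(toy_hlr, toy_eff, 3))  # cubic fit
toy_weight = fit(5.0) / fit(toy_hlr)              # normalized inverse efficiency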
    This function fetches the table of SDSS fields covering the given box
    from the Montage image archive.
    :return: the table of fields
    """
    # Determine the path to the temporary table file
    path = fs.join(self.temp_path, "fields.tbl")
    ra = ra.to("deg").value
    dec = dec.to("deg").value
    width = width.to("deg").value
    # Get the info
    montage.mArchiveList("SDSS", band, str(ra) + " " + str(dec), width, width, path)
    # Load the table
    table = Table.read(path, format="ascii")
    # Return the table
    return table
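# A minimal standalone sketch of the same Montage archive query (assumes
# the montage_wrapper package and a working Montage installation; the
# coordinates, band, and file name below are placeholders):
import montage_wrapper as montage
from astropy.table import Table

montage.mArchiveList("SDSS", "r", "150.1 2.2", 0.5, 0.5, "fields.tbl")
fields = Table.read("fields.tbl", format="ascii")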
def load_array_layout_from_file(array_layout_filename):
    """
    Read an array layout from a FITS file with a ``TELARRAY`` extension
    """
    return Table.read(array_layout_filename, hdu="TELARRAY")
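# Usage sketch for the loader above ("layout.fits" stands in for a real
# FITS file that contains a TELARRAY binary-table extension):
layout = load_array_layout_from_file("layout.fits")
print(layout.colnames)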
        either input filename/url or a Table instance
    format: str
        astropy.table format string (e.g. 'ascii.ecsv') in case the
        format cannot be determined from the file extension
    kwargs: extra keyword arguments
        extra arguments passed to `astropy.table.Table.read()`, depending on
        file type (e.g. format, hdu, path)
    """
    tab = url_or_table
    if not isinstance(url_or_table, Table):
        tab = Table.read(url_or_table, **kwargs)

    return cls(
        cam_id=tab.meta.get('CAM_ID', 'Unknown'),
        pix_id=tab['pix_id'],
        pix_x=tab['pix_x'].quantity,
        pix_y=tab['pix_y'].quantity,
        pix_area=tab['pix_area'].quantity,
        pix_type=tab.meta['PIX_TYPE'],
        pix_rotation=Angle(tab.meta['PIX_ROT'] * u.deg),
        cam_rotation=Angle(tab.meta['CAM_ROT'] * u.deg),
    )
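# Usage sketch, assuming the classmethod above belongs to a
# camera-geometry class (the class name and ECSV file name below are
# placeholders, not from the source):
geom = CameraGeometry.from_table("camera_geometry.ecsv", format="ascii.ecsv")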
lowres_model = Celestial(conv_model, header=hires_3.header)
res = Celestial(lowres.image - lowres_model.image, header=lowres.header)
res.save_to_fits('_res_{}.fits'.format(f_magnify))
lowres_model.resize_image(1 / f_magnify, method=config.fluxmodel.interp)
lowres_model.save_to_fits('_lowres_model.fits')
setattr(results, 'lowres_model_compact', copy.deepcopy(lowres_model))
res.resize_image(1 / f_magnify, method=config.fluxmodel.interp)
res.save_to_fits(output_name + '_res.fits')
setattr(results, 'res', res)
logger.info('Compact objects have been subtracted from the low-resolution image! Saved as "{}".'.format(output_name + '_res.fits'))
# 10. Subtract bright star halos! Only for those left out of the flux model!
star_cat = Table.read('_bright_stars_3.fits', format='fits')
star_cat['x'] /= f_magnify
star_cat['y'] /= f_magnify
ra, dec = res.wcs.wcs_pix2world(star_cat['x'], star_cat['y'], 0)
star_cat.add_columns([Column(data=ra, name='ra'), Column(data=dec, name='dec')])
b = config.starhalo.b
f = config.starhalo.f
sigma = config.starhalo.sigma
minarea = config.starhalo.minarea
deblend_cont = config.starhalo.deblend_cont
deblend_nthresh = config.starhalo.deblend_nthresh
sky_subtract = config.starhalo.sky_subtract
flux_aper = config.starhalo.flux_aper
logger.info('Extract objects from compact-object-subtracted low-resolution image with:')
logger.info(' - sigma = %.1f, minarea = %d', sigma, minarea)
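# The parameters logged above suggest a SExtractor-style detection step.
# A hedged sketch of such a call with the `sep` package (an illustration
# under that assumption, not necessarily the extraction routine this
# code actually uses):
import sep

data = res.image.astype(float)              # detection image
bkg = sep.Background(data)                  # estimate the sky background
if sky_subtract:
    data = data - bkg.back()
objects = sep.extract(data, sigma, err=bkg.globalrms,
                      minarea=minarea,
                      deblend_nthresh=deblend_nthresh,
                      deblend_cont=deblend_cont)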
def get_main_catalog(args, index_table, all_seg_ids):
    """Makes the main catalog containing information on all selected galaxies.
    Columns are identical to the COSMOS Real Galaxy catalog."""
    print("Creating main catalog")
    for f, filt in enumerate(args.filter_names):
        final_table = main_table()
        complete_table = Table()
        for seg_id in all_seg_ids:
            file_name = args.main_path + seg_id + '/' + filt + '_selected.fits'
            seg_cat = Table.read(file_name, format='fits')
            q, = np.where(index_table['SEG_ID'] == seg_id)
            indx_seg = index_table[q]
            temp = join(seg_cat, indx_seg, keys='NUMBER')
            col = Column(temp['HDU'], name='PSF_HDU')
            temp.add_column(col)
            temp.rename_column('MAG_CORR', 'MAG')
            temp.rename_column('HDU', 'GAL_HDU')
            p_scales = np.ones(len(q)) * 0.03
            weights = np.ones(len(q))
            im = [args.gal_im_name.replace('filter', args.file_filter_name[f])] * len(q)
            im_names = [im[i].replace('umber', str(temp['FILE_NUM'][i]))
                        for i in range(len(im))]
            psf = [args.psf_im_name.replace('filter', args.file_filter_name[f])] * len(q)
            psf_names = [psf[i].replace('umber', str(temp['FILE_NUM'][i]))
                         for i in range(len(psf))]
            noise_names = [args.noise_file_name.replace('filter', args.file_filter_name[f])] * len(q)
            names = ('WEIGHT', 'GAL_FILENAME', 'PSF_FILENAME',
                     'PIXEL_SCALE', 'NOISE_FILENAME')
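            # The snippet is truncated here. A hedged guess at the next
            # step, pairing the arrays built above with `names` (this
            # completion is an assumption, not the original code):
            # temp.add_columns([Column(weights, name='WEIGHT'),
            #                   Column(im_names, name='GAL_FILENAME'),
            #                   Column(psf_names, name='PSF_FILENAME'),
            #                   Column(p_scales, name='PIXEL_SCALE'),
            #                   Column(noise_names, name='NOISE_FILENAME')])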
    >>> print(data['ra'][:1])  # doctest: +REMOTE_DATA
    [49.6275024]
    >>> # first DEC value
    >>> print(data['dec'][:1])  # doctest: +REMOTE_DATA
    [-1.04175591]
    """
    data_home = get_data_home(data_home)
    archive_file = os.path.join(data_home, os.path.basename(DATA_URL))

    if not os.path.exists(archive_file):
        if not download_if_missing:
            raise IOError('data not present on disk. '
                          'set download_if_missing=True to download')
        data = Table.read(DATA_URL)
        data.write(archive_file)
    else:
        data = Table.read(archive_file)

    if cleaned:
        # -1.1 < FeH < 0.1
        data = data[(data['FeH'] > -1.1) & (data['FeH'] < 0.1)]
        # -0.03 < alpha/Fe < 0.57
        data = data[(data['alphFe'] > -0.03) & (data['alphFe'] < 0.57)]
        # 5000 < Teff < 6500
        data = data[(data['Teff'] > 5000) & (data['Teff'] < 6500)]
        # 3.5 < log(g) < 5
        data = data[(data['logg'] > 3.5) & (data['logg'] < 5)]
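        # The four chained selections above could equivalently be
        # applied as one boolean mask (an alternative sketch with
        # identical behavior):
        # mask = ((data['FeH'] > -1.1) & (data['FeH'] < 0.1) &
        #         (data['alphFe'] > -0.03) & (data['alphFe'] < 0.57) &
        #         (data['Teff'] > 5000) & (data['Teff'] < 6500) &
        #         (data['logg'] > 3.5) & (data['logg'] < 5))
        # data = data[mask]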
def _read_table_file(self, table_file):
    """Read an `astropy.table.Table` from table_file to set up the `JobArchive`"""
    self._table_file = table_file
    if os.path.exists(self._table_file):
        self._table = Table.read(self._table_file, hdu='JOB_ARCHIVE')
        self._table_ids = Table.read(self._table_file, hdu='FILE_IDS')
    else:
        self._table, self._table_ids = JobDetails.make_tables({})
    self._table_id_array = self._table_ids['file_id'].data
    self._fill_cache()
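# The method above reads two named HDUs from a single FITS file. A
# self-contained sketch of writing and then reading such a file with
# astropy (file and column names here are illustrative only):
from astropy.io import fits as pyfits
from astropy.table import Table

t1 = Table({'job_id': [1, 2]})
t2 = Table({'file_id': [10, 20]})
hdus = [pyfits.PrimaryHDU(),
        pyfits.table_to_hdu(t1),
        pyfits.table_to_hdu(t2)]
hdus[1].name = 'JOB_ARCHIVE'
hdus[2].name = 'FILE_IDS'
pyfits.HDUList(hdus).writeto('archive.fits', overwrite=True)

jobs = Table.read('archive.fits', hdu='JOB_ARCHIVE')
ids = Table.read('archive.fits', hdu='FILE_IDS')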