import os
import shutil
import tempfile
from contextlib import contextmanager

import astropy.io.fits


@contextmanager
def tmpFits(*hdus):
    # Given a list of numpy arrays, create a temporary FITS file that
    # contains them as consecutive HDUs. Yield it, then remove it.
    hdus = [astropy.io.fits.PrimaryHDU(hdus[0])] + \
           [astropy.io.fits.ImageHDU(hdu) for hdu in hdus[1:]]
    hdulist = astropy.io.fits.HDUList(hdus)
    tempdir = tempfile.mkdtemp()
    try:
        filename = os.path.join(tempdir, 'test.fits')
        hdulist.writeto(filename)
        yield filename
    finally:
        shutil.rmtree(tempdir)
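# Usage sketch for tmpFits (hypothetical arrays; assumes the
# contextlib.contextmanager decoration added above): the FITS file exists
# only inside the with-block and is removed, together with its temporary
# directory, on exit.
import numpy as np
import astropy.io.fits

primary = np.arange(12.0).reshape(3, 4)
extension = np.ones((2, 2))

with tmpFits(primary, extension) as filename:
    with astropy.io.fits.open(filename) as hdulist:
        assert len(hdulist) == 2                      # PrimaryHDU + one ImageHDU
        assert (hdulist[0].data == primary).all()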
#plot(wavelengths, np.polyval(coef_res, wavelengths))
#plot(vecp, vecm, 'o')
c_p2w_ob_B[order] = coeffs_pix2wav
#print order, rms_ms/np.sqrt(float(len(wavelengths))), rms_ms, len(residuals)

# initial guess for the global wavelength solution; only the first coefficient is non-zero
p0 = np.zeros(npar_wsol_B)
p0[0] = (int(np.around(0.5 * nord_ob1)) + or0_B) * Global_ZP
#GLOBALutils.get_zero_order_number(ords, meds)
p1_B, G_pix_B, G_ord_B, G_wav_B, II_B, rms_ms_B, G_res_B = \
    GLOBALutils.Fit_Global_Wav_Solution(All_Pixel_Centers_B, All_Wavelengths_B, All_Orders_B,
                                        np.ones(All_Intensities_B.shape), p0, Cheby=use_cheby,
                                        maxrms=MRMS, Inv=Inverse_m, minlines=minlines_glob,
                                        order0=or0_B, ntotal=nord_ob1, npix=len(thar_order),
                                        nx=ncoef_x_B, nm=ncoef_m_B)

# write the product, removing any previous copy of the file first
nhdu = pyfits.PrimaryHDU(spec_thar_ob)
if os.access(wavsol_fits, os.F_OK):
    os.remove(wavsol_fits)
nhdu.writeto(wavsol_fits)
#plot(ords, meds, 'ro')
#coefs_m = np.polyfit(ords, meds, 6)
#plot(ords, meds - np.polyval(coefs_m, ords), 'ro')
#show()
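# The remove-then-write pattern above can also be expressed with astropy's
# own overwrite support: writeto() accepts overwrite=True (called clobber in
# very old versions). A minimal sketch with a hypothetical array and filename:
import numpy as np
from astropy.io import fits

spec = np.zeros((2, 10))                    # placeholder spectrum
fits.PrimaryHDU(spec).writeto('wavsol.fits', overwrite=True)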
# accumulators for the per-order ThAr line measurements of the next fibre/arm
All_Pixel_Centers_co_R = np.array([])
All_Wavelengths_co_R = np.array([])
All_Orders_co_R = np.array([])
All_Centroids_co_R = np.array([])
All_Sigmas_co_R = np.array([])
All_Intensities_co_R = np.array([])
meds, ords = [], []
for order in range(nord_co2):

            # inference on this HDU
            t2 = process_hdu(src_im, results, sess)
            if VERB:
                speed2 = str(round((h*w)/(t2*1000000), 3))
                print("HDU " + str(k) + "/" + str(nb_hdu-1) + " inference done in " + str(t2) + " s: " + speed2 + " MPix/s")
            speedhdu = str(round((h*w)/((t1+t2)*1000000), 3))
            full_zero = False
            timelog.append(t1+t2)
            if VERB: print("HDU " + str(k) + "/" + str(nb_hdu-1) + " done in " + str(t1+t2) + " s: " + speedhdu + " MPix/s")
        else:
            # full zero image: skip inference, the output is null as well
            if VERB: print("HDU " + str(k) + "/" + str(nb_hdu-1) + " inference done (image is null, output is null)")
            full_zero = True
        if k == 0:
            # the first HDU of the output file must be a PrimaryHDU
            m_hdu = fits.PrimaryHDU(np.squeeze(results))
            fill_hdu_header(m_hdu)
            hdu.append(m_hdu)
        else:
            # subsequent HDUs are image extensions
            sub_hdu = fits.ImageHDU(np.squeeze(results))
            fill_hdu_header(sub_hdu)
            hdu.append(sub_hdu)
    else:
        # if this seems not to be data then copy the hdu unchanged
        hdu.append(src_im_hdu[k])
        if VERB: print("HDU " + str(k) + "/" + str(nb_hdu-1) + " done (just copied as it is not 2D data or supported type)")

tw = write_hdu(hdu, im_path.split(".fits")[0] + ".masks.fits")
if VERB:
    print(im_path.split(".fits")[0] + ".masks.fits written to disk in " + str(tw) + " s")
if len(timelog):
    tt = sum(timelog) + tw
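# The pattern above (PrimaryHDU for extension 0, ImageHDU for every later
# extension) is the general way to mirror a multi-extension input file. A
# minimal standalone sketch with made-up mask data:
import numpy as np
from astropy.io import fits

masks = [np.zeros((4, 4), dtype=np.uint8) for _ in range(3)]
out = fits.HDUList()
for k, m in enumerate(masks):
    out.append(fits.PrimaryHDU(m) if k == 0 else fits.ImageHDU(m))
out.writeto('masks.fits', overwrite=True)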
# the model is presumably an astropy Quantity at this point; keep only its values
model = model.value

from astropy.io import fits
import time

# use the WCS header if one is available, otherwise start from an empty header
if header is None:
    if hasattr(self._converter, "_wcs"):
        header = self._converter._wcs.to_header()
    else:
        header = fits.Header()

# Strip off units if the image is a Quantity
if hasattr(input_image, 'unit'):
    input_image = input_image.value.copy()

hdu = fits.PrimaryHDU(input_image, header)

skel_hdr = header.copy()
skel_hdr['BUNIT'] = ("", "bool")
skel_hdr['COMMENT'] = "Skeleton created by fil_finder on " + \
    time.strftime("%c")

skel_hdu = fits.ImageHDU(skels.astype(int), skel_hdr)
skel_lp_hdu = fits.ImageHDU(skels_lp.astype(int), skel_hdr)
model_hdu = fits.ImageHDU(model, header)

hdulist = fits.HDUList([hdu, skel_hdu, skel_lp_hdu, model_hdu])
hdulist.writeto(savename)
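# Reading such a multi-extension product back is symmetric; assuming a file
# written as above (savename), each extension is addressed by index:
from astropy.io import fits

with fits.open(savename) as hdul:
    image = hdul[0].data         # primary image
    skeleton = hdul[1].data      # skeleton extension
    model = hdul[3].data         # model extension
    print(hdul[1].header.get('BUNIT'))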
img = regrid_conv_img * nan_pix
distance = 140.
hdr['CDELT2'] /= r

# Toggle saving of the exact maps used in the algorithm
save_regrid_convolve = True
if save_regrid_convolve:
    hdr['NAXIS1'] = img.shape[1]
    hdr['NAXIS2'] = img.shape[0]
    hdu = fits.PrimaryHDU(img.astype(">f4"), header=hdr)
    hdu.writeto(filename[:-5] + "/" + filename[:-5] + "_regrid_convolved.fits")

print(filename, distance)

filfind = fil_finder_2D(img, hdr, beamwidth,
                        distance=distance, glob_thresh=20)
print(filfind.beamwidth, filfind.imgscale)

save_name = filename[:-5]
filfind.create_mask()
filfind.medskel(verbose=verbose)
filfind.analyze_skeletons()
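# Side note: astropy keeps NAXIS1/NAXIS2 in sync with the data array on
# write, so setting them by hand (as above) is usually redundant. A minimal
# sketch with a hypothetical output name:
import numpy as np
from astropy.io import fits

data = np.random.random((16, 32)).astype(">f4")
fits.PrimaryHDU(data).writeto("regrid_convolved.fits", overwrite=True)

with fits.open("regrid_convolved.fits") as hdul:
    print(hdul[0].header["NAXIS1"], hdul[0].header["NAXIS2"])   # 32 16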
                     wcslin_pscale=1., uniqid=1,
                     pixfrac=pixfrac, kernel=kernel, fillval=0,
                     stepsize=10, wcsmap=None)
except:
    # fall back for WCS objects that store the axis sizes in a _naxis list
    psf_wcs._naxis1, psf_wcs._naxis2 = psf_wcs._naxis
    adrizzle.do_driz(psf, psf_wcs, psf*0+flt_weight,
                     wcs_slice,
                     outsci, outwht, outctx, 1., 'cps', 1,
                     wcslin_pscale=1., uniqid=1,
                     pixfrac=pixfrac, kernel=kernel, fillval=0,
                     stepsize=10, wcsmap=None)

if False:
    # optional debugging: display the drizzled PSF in DS9
    count += 1
    hdu = pyfits.HDUList([pyfits.PrimaryHDU(), pyfits.ImageHDU(data=psf*100, header=utils.to_header(psf_wcs))])
    ds9.set('frame {0}'.format(count+1))
    ds9.set_pyfits(hdu)

#ss = 1000000/2
# normalize the drizzled PSF to unit sum
ss = 1./outsci.sum()
hdu = pyfits.HDUList([pyfits.PrimaryHDU(), pyfits.ImageHDU(data=outsci*ss, header=utils.to_header(wcs_slice))])

if False:
    ds9.set('frame 2')
    ds9.set_pyfits(hdu)

return hdu
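# utils.to_header() above is project-specific; with plain astropy the same
# structure (an empty primary HDU plus an image extension that carries the
# WCS) could be sketched as follows, using a hypothetical TAN WCS:
import numpy as np
from astropy.io import fits
from astropy.wcs import WCS

w = WCS(naxis=2)
w.wcs.ctype = ['RA---TAN', 'DEC--TAN']
w.wcs.crval = [150.0, 2.0]
w.wcs.crpix = [32.5, 32.5]
w.wcs.cdelt = [-0.1 / 3600, 0.1 / 3600]

psf = np.zeros((64, 64))
psf[32, 32] = 1.0
psf /= psf.sum()                            # unit-sum normalization, as above

hdu = fits.HDUList([fits.PrimaryHDU(),
                    fits.ImageHDU(data=psf, header=w.to_header())])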

    # save as fits files, removing any previous products first
    if (os.access(sci_fits_ob, os.F_OK)):
        os.remove(sci_fits_ob)
    if (os.access(sci_fits_co, os.F_OK)):
        os.remove(sci_fits_co)
    if (os.access(sci_fits_ob_simple, os.F_OK)):
        os.remove(sci_fits_ob_simple)
    if (os.access(sci_fits_co_simple, os.F_OK)):
        os.remove(sci_fits_co_simple)
    if (os.access(sci_fits_bac, os.F_OK)):
        os.remove(sci_fits_bac)
    hdu = pyfits.PrimaryHDU(sci_S_ob)
    hdu.writeto(sci_fits_ob)
    hdu = pyfits.PrimaryHDU(sci_S_co)
    hdu.writeto(sci_fits_co)
    hdu = pyfits.PrimaryHDU(sci_Ss_ob)
    hdu.writeto(sci_fits_ob_simple)
    hdu = pyfits.PrimaryHDU(sci_Ss_co)
    hdu.writeto(sci_fits_co_simple)
    hdu = pyfits.PrimaryHDU(sci_bac)
    hdu.writeto(sci_fits_bac)
else:
    print('\t\t\t' + fsim, "has already been extracted, reading in product fits files...")
    sci_S_ob = pyfits.getdata(sci_fits_ob)
    sci_S_co = pyfits.getdata(sci_fits_co)
    sci_Ss_ob = pyfits.getdata(sci_fits_ob_simple)
    sci_Ss_co = pyfits.getdata(sci_fits_co_simple)
    sci_bac = pyfits.getdata(sci_fits_bac)

fout = 'proc/' + obname + '_' + \
    h[0].header['HIERARCH ESO CORA SHUTTER START DATE'] + '_' + \
    'UT' + fsim[-17:-9] + '_' + \
def from_tree(cls, data, ctx):
    hdus = []
    first = True
    for hdu_entry in data:
        header = fits.Header([fits.Card(*x) for x in hdu_entry['header']])
        data = hdu_entry.get('data')
        if data is not None:
            try:
                data = data.__array__()
            except ValueError:
                data = None
        if first:
            # the first entry always becomes the primary HDU
            hdu = fits.PrimaryHDU(data=data, header=header)
            first = False
        elif data is not None and data.dtype.names is not None:
            # structured (record) arrays map to binary table extensions
            hdu = fits.BinTableHDU(data=data, header=header)
        else:
            # everything else becomes an image extension
            hdu = fits.ImageHDU(data=data, header=header)
        hdus.append(hdu)
    hdulist = fits.HDUList(hdus)
    return hdulist
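# A sketch of the tree structure the loop above expects: a list of entries,
# each with a list of header cards and optional data. With hypothetical
# values, the first entry below would become the PrimaryHDU and the second,
# being a structured array, a BinTableHDU:
import numpy as np

tree = [
    {'header': [('OBSERVER', 'nobody'), ('EXPTIME', 30.0)],
     'data': np.zeros((2, 2))},
    {'header': [('EXTNAME', 'CATALOG')],
     'data': np.array([(1, 2.5)], dtype=[('id', 'i4'), ('flux', 'f8')])},
]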
decrange : array/tuple
    Tuple/array with two entries giving the DEC range of the map, i.e. (decmin, decmax).

Returns
-------
hdu : astropy.io.fits.PrimaryHDU or astropy.io.fits.ImageHDU
    FITS HDU containing the skymap.
"""
decnbins, ranbins = map.shape

decstep = (decrange[1] - decrange[0]) / float(decnbins)
rastep = (rarange[1] - rarange[0]) / float(ranbins)

hdu = None
if primary:
    hdu = fits.PrimaryHDU(image)
else:
    hdu = fits.ImageHDU(image)
header = hdu.header

# Image definition (plate carrée projection)
header['CTYPE1'] = 'RA---CAR'
header['CTYPE2'] = 'DEC--CAR'
header['CUNIT1'] = 'deg'
header['CUNIT2'] = 'deg'
header['CRVAL1'] = rarange[0]
# CRVAL2 must be zero for the grid lines to be rectilinear (Calabretta & Greisen 2002)
header['CRVAL2'] = 0.
header['CRPIX1'] = .5
# reference pixel outside of the image, at DEC = 0
header['CRPIX2'] = -decrange[0] / decstep + .5
header['CDELT1'] = rastep
header['CDELT2'] = decstep
header['RADESYS'] = 'FK5'
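# A quick sanity check of the plate carrée header defined above, rebuilt
# standalone with hypothetical map dimensions and RA/DEC ranges:
import numpy as np
from astropy.io import fits
from astropy.wcs import WCS

decrange, rarange = (-30.0, 60.0), (0.0, 90.0)
decnbins, ranbins = 90, 90
decstep = (decrange[1] - decrange[0]) / float(decnbins)
rastep = (rarange[1] - rarange[0]) / float(ranbins)

hdu = fits.PrimaryHDU(np.zeros((decnbins, ranbins)))
hdu.header['CTYPE1'], hdu.header['CTYPE2'] = 'RA---CAR', 'DEC--CAR'
hdu.header['CUNIT1'] = hdu.header['CUNIT2'] = 'deg'
hdu.header['CRVAL1'], hdu.header['CRVAL2'] = rarange[0], 0.
hdu.header['CRPIX1'] = .5
hdu.header['CRPIX2'] = -decrange[0] / decstep + .5
hdu.header['CDELT1'], hdu.header['CDELT2'] = rastep, decstep

# the centre of the first pixel should map to (rarange[0] + rastep/2, decrange[0] + decstep/2)
w = WCS(hdu.header)
print(w.wcs_pix2world([[1.0, 1.0]], 1))    # [[ 0.5 -29.5]]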
print('\n\tExtraction of Flat calibration frames:')
Flat_spec_fits = dirout + 'Flat_spec.fits'
Flat_bkg_fits = dirout + 'BKG_flat.fits'
if (os.access(Flat_spec_fits, os.F_OK) == False) or (force_flat_extract):
    print("\t\tNo previous Flat extracted or extraction forced, extracting and saving...")
    # evaluate the order-trace polynomials to get the centre of each order at every column
    Centers = np.zeros((len(c_all), RFlat.shape[1]))
    for i in range(nord):
        Centers[i, :] = np.polyval(c_all[i, :], np.arange(len(Centers[i, :])))
    # scattered-light background and simple (box) extraction of the flat
    BKG = GLOBALutils.get_scat(RFlat, Centers, span=10)
    flat_S = GLOBALutils.simple_extraction(RFlat - BKG, c_all, ext_aperture,
                                           min_extract_col, max_extract_col, npools)
    flat_S = GLOBALutils.invert(flat_S)
    flat_S = flat_S[::-1]
    # write the extracted flat and its background, replacing any previous files
    if (os.access(Flat_spec_fits, os.F_OK)):
        os.remove(Flat_spec_fits)
    hdu = pyfits.PrimaryHDU(flat_S)
    hdu.writeto(Flat_spec_fits)
    if (os.access(Flat_bkg_fits, os.F_OK)):
        os.remove(Flat_bkg_fits)
    hdu = pyfits.PrimaryHDU(BKG)
    hdu.writeto(Flat_bkg_fits)
else:
    print("\t\tExtracted flat found, loading...")
    flat_S = pyfits.getdata(Flat_spec_fits)

flat_S_n, norms = GLOBALutils.FlatNormalize_single(flat_S, mid=int(.5 * flat_S.shape[1]))

print('\n\tExtraction of ThAr calibration frames:')
# Extract all ThAr files
for fsim in thars:
    hthar = pyfits.open(fsim)