import numpy as np
from astropy.io import fits

array = np.array([[1.0, 1.0, 1.0], [1.0, 2.0, 1.0], [1.0, 1.0, 1.0]])
fits.writeto(data=array, filename=path + "3x3_ones_central_two.fits")  # 'path' is an output directory defined elsewhere in the original script
# stop  # debugging halt left in the original script; commented out because 'stop' is an undefined name
array1 = np.ones((3, 3))
array2 = 2.0 * np.ones((3, 3))
array3 = 3.0 * np.ones((3, 3))
array4 = 4.0 * np.ones((3, 3))
array5 = 5.0 * np.ones((3, 3))
array6 = 6.0 * np.ones((3, 3))
array7 = 7.0 * np.ones((3, 3))
array8 = 8.0 * np.ones((3, 3))
fits.writeto(data=array2, filename=path + "3x3_twos.fits")
fits.writeto(data=array3, filename=path + "3x3_threes.fits")
fits.writeto(data=array4, filename=path + "3x3_fours.fits")
fits.writeto(data=array5, filename=path + "3x3_fives.fits")
fits.writeto(data=array6, filename=path + "3x3_sixes.fits")
fits.writeto(data=array7, filename=path + "3x3_sevens.fits")
fits.writeto(data=array8, filename=path + "3x3_eights.fits")
new_hdul = fits.HDUList()
new_hdul.append(fits.ImageHDU(array1))
new_hdul.append(fits.ImageHDU(array2))
new_hdul.append(fits.ImageHDU(array3))
new_hdul.append(fits.ImageHDU(array4))
new_hdul.append(fits.ImageHDU(array5))
new_hdul.append(fits.ImageHDU(array6))
new_hdul.append(fits.ImageHDU(array7))
new_hdul.append(fits.ImageHDU(array8))
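# Follow-up sketch (assumption: the output name "3x3_multi_hdu.fits" is hypothetical, not
# from the original script). A FITS file must start with a primary HDU, after which the
# stacked image extensions above can be written out in one go.
new_hdul.insert(0, fits.PrimaryHDU())
new_hdul.writeto(path + "3x3_multi_hdu.fits", overwrite=True)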
def __init__(self, flatdata, datamodel=None, calibid='calibid-unknown', dtype='float32'):
super(CommonFlatCorrector, self).__init__(datamodel=datamodel,
calibid=calibid,
dtype=dtype)
if isinstance(flatdata, fits.HDUList):
self.corr = flatdata[0].data
elif isinstance(flatdata, fits.ImageHDU):
self.corr = flatdata.data
else:
self.corr = numpy.asarray(flatdata)
self.corrmean = self.corr.mean()
self.flattag = 'flat'
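# Usage sketch (assumptions: the module imports numpy and astropy.io.fits as the code above
# implies, and the base-class defaults are acceptable). Any of an HDUList, a single ImageHDU,
# or a plain array can be passed as the flat data.
flat_corrector = CommonFlatCorrector(numpy.ones((2048, 2048)))
print(flat_corrector.corrmean)  # -> 1.0 for a uniform flat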
"""Save image data to a fits file.
Args:
fname (str): path to output fits file
mjd (int): MJD of saved image
time (float): UTC time of saved image
Returns:
"""
# Transform to Stokes parameters:
if (im.polrep!='stokes') or (im.pol_prim!='I'):
im = im.switch_polrep(polrep_out='stokes', pol_prim_out=None)
# Create header and fill in some values
header = fits.Header()
header['OBJECT'] = im.source
header['CTYPE1'] = 'RA---SIN'
header['CTYPE2'] = 'DEC--SIN'
header['CDELT1'] = -im.psize/DEGREE
header['CDELT2'] = im.psize/DEGREE
header['OBSRA'] = im.ra * 180/12.
header['OBSDEC'] = im.dec
header['FREQ'] = im.rf
#TODO these are the default values for centered images
#TODO support for arbitrary CRPIX?
header['CRPIX1'] = im.xdim/2. + .5
header['CRPIX2'] = im.ydim/2. + .5
if not mjd: mjd = float(im.mjd)
if not time: time = im.time
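# Sketch of the write step implied by the docstring above (assumptions: 'image_data' stands
# for the image's 2-D pixel array, and 'header' and 'fname' are the header built above and
# the output path passed in by the caller).
hdu = fits.PrimaryHDU(data=image_data, header=header)
hdu.writeto(fname, overwrite=True)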
def split_files(file_list, page_type):
"""JUST FOR USE DURING DEVELOPMENT WITH FILESYSTEM
Splits the files in the filesystem into "unlooked" and "archived",
with the "unlooked" images being the most recent 10% of files.
"""
exp_times = []
for file in file_list:
hdr = fits.getheader(file, ext=0)
exp_start = hdr['EXPSTART']
exp_times.append(exp_start)
exp_times_sorted = sorted(exp_times)
i_cutoff = int(len(exp_times) * 0.9)  # index where the newest ~10% of start times begin
t_cutoff = exp_times_sorted[i_cutoff]
mask_unlooked = np.array([t >= t_cutoff for t in exp_times])  # newest files are "unlooked"
if page_type == 'unlooked':
print('ONLY RETURNING {} "UNLOOKED" FILES OF {} ORIGINAL FILES'.format(len([m for m in mask_unlooked if m]), len(file_list)))
return [f for i, f in enumerate(file_list) if mask_unlooked[i]]
elif page_type == 'archive':
print('ONLY RETURNING {} "ARCHIVED" FILES OF {} ORIGINAL FILES'.format(len([m for m in mask_unlooked if not m]), len(file_list)))
return [f for i, f in enumerate(file_list) if not mask_unlooked[i]]
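# Usage sketch (assumption: the directory below is hypothetical; each file's primary header
# must contain EXPSTART, as the loop above requires).
import glob
fits_files = glob.glob('/hypothetical/archive/*.fits')
unlooked_files = split_files(fits_files, 'unlooked')
archived_files = split_files(fits_files, 'archive')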
"nsample_array_dtype must be one of: np.float64, np.float32, np.float16"
)
# iterate through files and organize
# create a list of included coarse channels
# find the first and last times that have data
for file in filelist:
if file.lower().endswith(".metafits"):
# force only one metafits file
if metafits_file is not None:
raise ValueError("multiple metafits files in filelist")
metafits_file = file
elif file.lower().endswith(".fits"):
# check if ppds file
try:
fits.getheader(file, extname="ppds")
ppds_file = file
except Exception:
# check obsid
head0 = fits.getheader(file, 0)
if obs_id is None:
obs_id = head0["OBSID"]
else:
if head0["OBSID"] != obs_id:
raise ValueError(
"files from different observations submitted "
"in same list"
)
# check headers for first and last times containing data
headstart = fits.getheader(file, 1)
headfin = fits.getheader(file, -1)
first_time = headstart["TIME"] + headstart["MILLITIM"] / 1000.0
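# One plausible continuation (assumption: the excerpt above is truncated); 'headfin', read
# from the last HDU, gives the matching end time in the same way.
last_time = headfin["TIME"] + headfin["MILLITIM"] / 1000.0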
def try_load_beams(data):
'''
Try loading a beam table from a FITS HDU list.
'''
try:
from radio_beam import Beam
except ImportError:
warnings.warn("radio_beam is not installed. No beam "
"can be created.",
ImportWarning
)
if isinstance(data, fits.BinTableHDU):
if 'BPA' in data.data.names:
beam_table = data.data
return beam_table
else:
raise ValueError("No beam table found")
elif isinstance(data, fits.HDUList):
for ihdu, hdu_item in enumerate(data):
if isinstance(hdu_item, (fits.PrimaryHDU, fits.ImageHDU)):
beam = try_load_beams(hdu_item.header)
elif isinstance(hdu_item, fits.BinTableHDU):
if 'BPA' in hdu_item.data.names:
beam_table = hdu_item.data
return beam_table
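# Usage sketch (assumption: 'cube.fits' is a hypothetical file). When no matching HDU is
# found, the function above falls through and returns None implicitly.
with fits.open('cube.fits') as hdul:
    beam_table = try_load_beams(hdul)
if beam_table is None:
    print("no beam table found in this file")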
try:
    head['FREQID'] = 1
    hdulist['AIPS AN'].header = head
except KeyError:
    pass  # assumed handler: tolerate a missing AIPS AN table (original clause not shown in this excerpt)
##################### AIPS FQ TABLE #####################################################################################################
# Convert types & columns
freqid = np.array([1])
bandfreq = np.array([ch1_freq + ch_spacing*i - ref_freq for i in range(nchan)]).reshape([1,nchan])
chwidth = np.array([ch_bw for i in range(nchan)]).reshape([1,nchan])
totbw = np.array([ch_bw for i in range(nchan)]).reshape([1,nchan])
sideband = np.array([1 for i in range(nchan)]).reshape([1,nchan])
freqid = fits.Column(name="FRQSEL", format="1J", array=freqid)
bandfreq = fits.Column(name="IF FREQ", format="%dD"%(nchan), array=bandfreq, unit='HZ')
chwidth = fits.Column(name="CH WIDTH",format="%dE"%(nchan), array=chwidth, unit='HZ')
totbw = fits.Column(name="TOTAL BANDWIDTH",format="%dE"%(nchan),array=totbw, unit='HZ')
sideband = fits.Column(name="SIDEBAND",format="%dJ"%(nchan),array=sideband)
cols = fits.ColDefs([freqid, bandfreq, chwidth, totbw, sideband])
# create table
tbhdu = fits.BinTableHDU.from_columns(cols)
# header information
tbhdu.header.append(("NO_IF", nchan, "Number IFs"))
tbhdu.header.append(("EXTNAME","AIPS FQ"))
tbhdu.header.append(("EXTVER",1))
hdulist.append(tbhdu)
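# Minimal round-trip sketch of the Column -> ColDefs -> BinTableHDU pattern used for the
# AIPS FQ table above (the single column and its values are illustrative only).
demo_col = fits.Column(name="FRQSEL", format="1J", array=np.array([1]))
demo_hdu = fits.BinTableHDU.from_columns(fits.ColDefs([demo_col]))
print(demo_hdu.data["FRQSEL"])  # -> [1]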
##################### AIPS NX TABLE #####################################################################################################
scan_times = []
scan_time_ints = []
# Load an image from a FITS file
from astropy.io import fits
fitsFile="../examples/Filters/frame-i-002830-6-0398.fits"
hdulist = fits.open(fitsFile)
img = hdulist[0].data
# Display the image with OpenCV
import cv2
import numpy as np
Min = abs(np.amin(img))  # absolute value: CCD errors can yield slightly wrong readings, in particular small negative values very close to zero
Max = np.amax(img)
img = (img + Min) / (Max + Min)  # rescale to [0, 1] so cv2.imshow renders the floating-point image correctly
cv2.namedWindow("Image", cv2.WINDOW_NORMAL)  # create a window named "Image"
cv2.imshow("Image", img)  # draw img in the "Image" window
cv2.waitKey()  # keep the window open until a key is pressed
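# Alternative display-scaling sketch (assumption: 'hdulist' is still open as above):
# clipping to the 1st-99th percentiles keeps a few hot or negative CCD pixels from
# dominating the stretch, and the result is converted to 8-bit for OpenCV.
raw = hdulist[0].data
low, high = np.percentile(raw, [1, 99])
scaled = np.clip((raw - low) / (high - low), 0.0, 1.0)
cv2.imshow("Image", (255 * scaled).astype(np.uint8))
cv2.waitKey()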
def planets_finder(image_dir,filetype,method,planet_position =[0,0],range_of_search = 0):
"""Locate a planet by searching for a brightness maximum in an image.

image_dir       : array or str : the image itself, or the path to a FITS file containing it
filetype        : str          : type of the input image, either 'array' or 'fits'
method          : str          : 'global_max' (search the maximum of the entire image) or
                                 'local_max' (search inside a region of given size centered
                                 on planet_position)
planet_position : [x, y]       : position around which to search for the local maximum
range_of_search : int          : size of the region (centered on planet_position) to search
"""
#Open the image depending on the filetype:
if filetype == 'array':
    image = image_dir
elif filetype == 'fits':
    data = fits.open(image_dir)
    image = data[0].data
# hdr = data[0].header
# # Store the image dimension:
# length_x = len(image[0])
# length_y = len(image[1])
#Find the maximum depending on the method input:
if method=='local_max':
resized_image = image[int(planet_position[1]-range_of_search/2.):
int(planet_position[1]+range_of_search/2.),
int(planet_position[0]-range_of_search/2.):
int(planet_position[0]+range_of_search/2.)]
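# Sketch of the maximum search described by the docstring above (assumption: the original
# continuation is not shown here; np.argmax with np.unravel_index is one standard way).
if method == 'global_max':
    y_max, x_max = np.unravel_index(np.argmax(image), image.shape)
else:  # 'local_max': map the peak of the cut-out back to full-image coordinates
    dy, dx = np.unravel_index(np.argmax(resized_image), resized_image.shape)
    y_max = int(planet_position[1] - range_of_search / 2.) + dy
    x_max = int(planet_position[0] - range_of_search / 2.) + dx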
# scale image
# beware, X can mean just about anything
poltya = np.full((self.Nants_telescope), 'X', dtype=np.object_)
polaa = [90.0] + np.zeros(self.Nants_telescope)
poltyb = np.full((self.Nants_telescope), 'Y', dtype=np.object_)
polab = [0.0] + np.zeros(self.Nants_telescope)
col1 = fits.Column(name='ANNAME', format='8A',
array=self.antenna_names)
col2 = fits.Column(name='STABXYZ', format='3D',
array=self.antenna_positions)
# convert to 1-indexed from 0-indexed indices
col3 = fits.Column(name='NOSTA', format='1J',
array=self.antenna_numbers + 1)
col4 = fits.Column(name='MNTSTA', format='1J', array=mntsta)
col5 = fits.Column(name='STAXOF', format='1E', array=staxof)
col6 = fits.Column(name='POLTYA', format='1A', array=poltya)
col7 = fits.Column(name='POLAA', format='1E', array=polaa)
# col8 = fits.Column(name='POLCALA', format='3E', array=polcala)
col9 = fits.Column(name='POLTYB', format='1A', array=poltyb)
col10 = fits.Column(name='POLAB', format='1E', array=polab)
# col11 = fits.Column(name='POLCALB', format='3E', array=polcalb)
# note ORBPARM is technically required, but we didn't put it in
cols = fits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col9,
col10])
ant_hdu = fits.BinTableHDU.from_columns(cols)
ant_hdu.header['EXTNAME'] = 'AIPS AN'
ant_hdu.header['EXTVER'] = 1
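# Closing sketch (assumptions: 'hdulist' is the fits.HDUList being assembled, as in the
# FQ-table section earlier, and 'antenna_tables.fits' is a hypothetical output path).
hdulist.append(ant_hdu)
hdulist.writeto('antenna_tables.fits', overwrite=True)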