'), css_class='col-md-4'),
Div('b_exptime', css_class='col-md-8'), css_class='form-row'
),
Div(
@@ -283,7 +333,7 @@ def is_valid(self):
return not errors
def _init_observation_payload(self, target):
- wait = True #On Hold
+ #wait = True #On Hold
coords = SkyCoord(ra=target.ra*u.degree, dec=target.dec*u.degree)
now = datetime.utcnow()
sn_name = target.name
@@ -324,55 +374,75 @@ def observation_payload(self):
if self.data['n_or_s'] == 'north':
if self.data['slit'] == '1.5':
obsid_map = {
- 'B600': {
- 'arc': '68',
- 'acquisition': '66',
- 'science_with_flats': '67',
+# 'B600': {
+# 'arc': '68',
+# 'acquisition': '66',
+# 'science_with_flats': '67',
+# },
+ 'B480': {
+ 'arc': '104',
+ 'acquisition': '102',
+ 'science_with_flats': '103',
},
'R400': {
- 'arc': '74',
- 'acquisition': '72',
- 'science_with_flats': '73',
+ 'arc': '110',
+ 'acquisition': '108',
+ 'science_with_flats': '109',
}
}
elif self.data['slit'] == '1':
obsid_map = {
- 'B600': {
- 'arc': '65',
- 'acquisition': '63',
- 'science_with_flats': '64',
+# 'B600': {
+# 'arc': '65',
+# 'acquisition': '63',
+# 'science_with_flats': '64',
+# },
+ 'B480': {
+ 'arc': '101',
+ 'acquisition': '99',
+ 'science_with_flats': '100',
},
'R400': {
- 'arc': '71',
- 'acquisition': '69',
- 'science_with_flats': '70',
+ 'arc': '107',
+ 'acquisition': '105',
+ 'science_with_flats': '106',
}
}
elif self.data['n_or_s'] == 'south':
if self.data['slit'] == '1.5':
obsid_map = {
'B600': {
- 'arc': '64',
- 'acquisition': '65',
- 'science_with_flats': '66',
+ 'arc': '56',
+ 'acquisition': '57',
+ 'science_with_flats': '58',
+ },
+ 'B480': {
+ 'arc': '93',
+ 'acquisition': '94',
+ 'science_with_flats': '95',
},
'R400': {
- 'arc': '70',
- 'acquisition': '71',
- 'science_with_flats': '72',
+ 'arc': '62',
+ 'acquisition': '63',
+ 'science_with_flats': '64',
}
}
elif self.data['slit'] == '1':
obsid_map = {
'B600': {
- 'arc': '61',
- 'acquisition': '62',
- 'science_with_flats': '63',
+ 'arc': '53',
+ 'acquisition': '54',
+ 'science_with_flats': '55',
+ },
+ 'B480': {
+ 'arc': '90',
+ 'acquisition': '91',
+ 'science_with_flats': '92',
},
'R400': {
- 'arc': '67',
- 'acquisition': '68',
- 'science_with_flats': '69',
+ 'arc': '59',
+ 'acquisition': '60',
+ 'science_with_flats': '61',
}
}
@@ -395,7 +465,7 @@ def observation_payload(self):
payloads[f'{color_grating}_{key}']['target'] = 'Acquisition'
elif key == 'science_with_flats':
payloads[f'{color_grating}_{key}']['target'] = target.name
- if color_grating == 'B600':
+ if color_grating in ['B600', 'B480']:
payloads[f'{color_grating}_{key}']['exptime'] = int(float(self.data['b_exptime'])/2)
elif color_grating == 'R400':
payloads[f'{color_grating}_{key}']['exptime'] = int(float(self.data['r_exptime'])/2)
diff --git a/docker-compose.yml b/docker-compose.yml
index c2d51029..ffc7f94e 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -11,8 +11,13 @@ services:
volumes:
- ${SNEX2_DB_DATA_PATH}:/var/lib/postgresql/data
ports:
-      - 5435:5432
-      #platform: linux/amd64
+      - 5435:5432/tcp
+      platform: linux/amd64
snex2:
image: lcogt/snex2:latest
network_mode: bridge
@@ -20,6 +25,7 @@ services:
- "snex2-db:snex2-db"
ports:
- 8889:8080
+ platform: linux/amd64
restart: "always"
mem_limit: "4g"
logging:
@@ -75,6 +81,7 @@ services:
- ${SNEX_MUSCAT_FITS_PATH}:/snex2/data/fits/2m0a/
- ${SNEX_GW_FITS_PATH}:/snex2/data/fits/gw/
- ${SNEX_FLOYDS_PATH}:/snex2/data/floyds/
- ${SNEX_FLOYDS_WEB_PATH}:/snex2/data/WEB/floyds/
snex2-dev:
image: snex2-dev:latest
@@ -139,3 +146,67 @@ services:
- ${SNEX_GW_FITS_PATH}:/snex2/data/fits/gw/
- ${SNEX_FLOYDS_PATH}:/snex2/data/floyds/
- ${SNEX_FLOYDS_WEB_PATH}:/snex2/data/WEB/floyds/
diff --git a/gw/hooks.py b/gw/hooks.py
index ee8aae03..680314ea 100644
--- a/gw/hooks.py
+++ b/gw/hooks.py
@@ -1,10 +1,10 @@
-import os
-from gw.models import GWFollowupGalaxies
+from .models import GWFollowupGalaxy
from tom_common.hooks import run_hook
-from tom_targets.models import Target, TargetExtra
+from tom_targets.models import TargetExtra
from tom_nonlocalizedevents.models import EventSequence
from custom_code.views import cancel_observation, Snex1ConnectionError
-from custom_code.hooks import _return_session, _load_table
+from custom_code.hooks import _return_session
+from tom_observations.models import ObservationRecord
import logging
from django.conf import settings
@@ -23,7 +23,7 @@ def cancel_gw_obs(galaxy_ids=[], sequence_id=None, wrapped_session=None):
return
if galaxy_ids:
- galaxies = GWFollowupGalaxies.objects.filter(id__in=galaxy_ids)
+ galaxies = GWFollowupGalaxy.objects.filter(id__in=galaxy_ids)
elif sequence_id:
sequence = EventSequence.objects.get(id=sequence_id)
@@ -69,49 +69,3 @@ def cancel_gw_obs(galaxy_ids=[], sequence_id=None, wrapped_session=None):
else:
logger.info('Finished canceling GW follow-up observations for sequence {}'.format(sequence_id))
-
-def ingest_gw_galaxy_into_snex1(target_id, event_id, wrapped_session=None):
-
- if wrapped_session:
- db_session = wrapped_session
-
- else:
- db_session = _return_session(_snex1_address)
-
- o4_galaxies = _load_table('o4_galaxies', db_address=_snex1_address)
-
- existing_target = db_session.query(o4_galaxies).filter(o4_galaxies.targetid==target_id)
- if existing_target.count() > 0:
- if any([t.event_id == event_id for t in existing_target]):
- logger.info('Already ingested target {} into o4_galaxies table for event {}'.format(target_id, event_id))
-
- else:
- logger.info('Found existing target {} in o4_galaxies table, copying it'.format(target_id))
- existing_target_row = existing_target.first()
- existing_table = existing_target_row.__table__ #Somehow this is different than the o4_galaxies object
-
- non_pk_columns = [k for k in existing_table.columns.keys() if k not in existing_table.primary_key.columns.keys()]
- data = {c: getattr(existing_target_row, c) for c in non_pk_columns}
- data['event_id'] = event_id
-
- db_session.add(o4_galaxies(**data))
-
- else:
- snex2_target = Target.objects.get(id=target_id)
- ra0 = snex2_target.ra
- dec0 = snex2_target.dec
-
- db_session.add(o4_galaxies(targetid=target_id, event_id=event_id, ra0=ra0, dec0=dec0))
-
- if not wrapped_session:
- try:
- db_session.commit()
- except:
- db_session.rollback()
- finally:
- db_session.close()
-
- else:
- db_session.flush()
-
- logger.info('Finished ingesting target {} into o4_galaxies'.format(target_id))
diff --git a/gw/run_template_search.py b/gw/run_template_search.py
new file mode 100644
index 00000000..d91ffed4
--- /dev/null
+++ b/gw/run_template_search.py
@@ -0,0 +1,151 @@
+# -*- coding: utf-8 -*-
+"""run_template_search.py
+
+Script to search and (if exist) download template
+images from available optical surveys. The pixel scale, and the size
+of the resulting template will match the characteristics of the LCO
+instrument specified. The objects will also be inserted in the
+`o4_galaxy` table in the SNEx1 database, as well as the path to the
+downloaded files. Note that the root folder of the download is
+specified in the :__init__.py: of :survey_queries:.
+
+The :dramatiq: decorator makes it possible to run it asynchronously.
+
+"""
+
+from .survey_queries.query import template_query
+from custom_code.hooks import _return_session, _load_table
+import logging
+from django.conf import settings
+from tom_targets.models import Target
+import dramatiq
+import datetime
+
+TABLE_NAME = 'o4_galaxies'
+DEFAULT_FILTERS = 'gri'
+DEFAULT_SURVEYS = ['PS1','SDSS']
+DEFAULT_INSTRUMENTS = 'all'
+
+logger = logging.getLogger(__name__)
+
+@dramatiq.actor(max_retries=0)
+def search_templates_and_update_snex1(_targets_list, _wrapped_session, _filters=DEFAULT_FILTERS, _surveys=DEFAULT_SURVEYS, _instruments=DEFAULT_INSTRUMENTS):
+
+ if _wrapped_session:
+ db_session = _wrapped_session
+
+ else:
+ db_session = _return_session(settings.SNEX1_DB_URL)
+
+ gw_table = _load_table(TABLE_NAME, db_address=settings.SNEX1_DB_URL)
+ photlco = _load_table('photlco', db_address=settings.SNEX1_DB_URL)
+
+ for target_id, event_id in _targets_list:
+
+ existing_target = db_session.query(gw_table).filter(gw_table.targetid==target_id)
+ if existing_target.count() > 0:
+ if any([t.event_id == event_id for t in existing_target]):
+ logger.info('Already ingested target {} into {} table for event {}'.format(target_id, TABLE_NAME, event_id))
+
+ else:
+ logger.info('Found existing target {} in {} table, copying it'.format(target_id, TABLE_NAME))
+ existing_target_row = existing_target.first()
+ existing_table = existing_target_row.__table__ #Somehow this is different than the o4_galaxies object
+
+ non_pk_columns = [k for k in existing_table.columns.keys() if k not in existing_table.primary_key.columns.keys()]
+ data = {c: getattr(existing_target_row, c) for c in non_pk_columns}
+ data['event_id'] = event_id
+
+ db_session.add(gw_table(**data))
+
+ else:
+ t = Target.objects.get(id=target_id)
+
+ s = template_query(t.name, t.ra, t.dec, _filters, _instruments)
+
+ if 'PS1' in _surveys:
+ s.search_for_PS1()
+ if 'SDSS' in _surveys:
+ s.search_for_SDSS()
+
+ db_session.add(gw_table(targetid = target_id, event_id = event_id, ra0=t.ra, dec0=t.dec, **s.templates_paths))
+
+            for template in s.templates_paths:
+                # Paths are initialized to '--'; only ingest actual downloads
+                if s.templates_paths[template] != '--':
+                    db_session.add(photlco(**photlco_dict(target_id, s.templates_paths[template])))
+
+
+ if not _wrapped_session:
+ try:
+ db_session.commit()
+        except Exception:
+ db_session.rollback()
+ finally:
+ db_session.close()
+
+ else:
+ db_session.flush()
+
+ logger.info('Finished ingesting target {} into {}'.format(target_id, TABLE_NAME))
+
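+# A minimal usage sketch (an assumption, not part of the original code): with a
+# configured broker, the actor is enqueued with JSON-serializable arguments,
+# e.g. passing None for the session so the worker opens its own:
+#     search_templates_and_update_snex1.send([[target_id, event_id]], None)
+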
+def photlco_dict(_targetid, _template_path):
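+    '''
+    Build the dictionary of default values describing a downloaded survey
+    template image, used to insert a row into the SNEx1 `photlco` table.
+    '''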
+
+    from .survey_queries import SURVEYS
+    import astropy.io.fits as pf
+
+    template_header = pf.getheader(_template_path)
+    path_list = _template_path.split('/')
+
+    now = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")
+
+ phot_lco_dic = {
+ 'targetid': _targetid,
+        'objname': template_header['OBJECT'],
+        'dayobs': template_header['DAY-OBS'],
+        'dateobs': template_header['DATE-OBS'].split('T')[0],
+        'ut': template_header['DATE-OBS'].split('T')[1],
+        'mjd': template_header['MJD-OBS'],
+        'exptime': 1,
+        'filter': template_header['FILTER'],
+        'telescopeid': SURVEYS[template_header['SURVEY']].telescopeid,
+        'instrumentid': SURVEYS[template_header['SURVEY']].instrumentid,
+        'telescope': template_header['TELESCOP'],
+        'instrument': template_header['INSTRUME'],
+ 'mag': 9999,
+ 'dmag': 9999,
+ 'airmass': 1,
+ 'wcs': 0,
+ 'psf': 'X',
+ 'apmag': 9999,
+ 'psfx': 9999,
+ 'psfy': 9999,
+ 'psfmag': 9999,
+ 'psfdmag': 9999,
+ 'z1': 9999,
+ 'z2': 9999,
+ 'zn': 9999,
+ 'c1': 9999,
+ 'c2': 9999,
+ 'dz1': 9999,
+ 'dz2': 9999,
+ 'dc1': 9999,
+ 'dc2': 9999,
+ 'quality': 127,
+ 'zcat': 'X',
+ 'abscat': 'X',
+ 'fwhm': 9999,
+ 'magtype': 1,
+        'ra0': template_header['RA'],
+        'dec0': template_header['DEC'],
+ 'tracknumber': 0,
+ 'filename': path_list[-1],
+ 'filetype': 4,
+ 'filepath': '/'.join(path_list[:-1])+'/',
+ 'groupidcode': 2199023255552,
+ 'datecreated': now,
+ 'lastmodified': now,
+ 'apflux': 9999,
+ 'dapflux': 9999,
+ 'dapmag': 9999,
+ 'lastunpacked': now
+ }
+
+ return phot_lco_dic
diff --git a/gw/survey_queries/DECam_search.py b/gw/survey_queries/DECam_search.py
new file mode 100644
index 00000000..11d46182
--- /dev/null
+++ b/gw/survey_queries/DECam_search.py
@@ -0,0 +1,127 @@
+# -*- coding: utf-8 -*-
+"""DECam_search.py
+
+"""
+
+import urllib.request
+from numpy.char import startswith  # numpy.core.defchararray is not public API
+from astropy import units as u
+from pyvo.dal import sia
+#from astroquery.hips2fits import hips2fits
+#from astropy import wcs as astropy_wcs
+import astropy.io.fits as pf
+
+# List of available services at:
+# https://datalab.noirlab.edu/docs/manual/UsingAstroDataLab/DataAccessInterfaces/SimpleImageAccessSIA/SimpleImageAccessSIA.html
+#COLLECTION = 'coadd_all'
+COLLECTION = 'ls_dr9'
+# COLLECTION = 'coadd/decaps_dr2'
+# COLLECTION = 'delve_dr2'
+#DECAM_SERVICE = f'https://datalab.noirlab.edu/tap/{COLLECTION}'
+DECAM_SERVICE = f'https://datalab.noirlab.edu/sia/{COLLECTION}'
+
+
+#url = 'https://alasky.cds.unistra.fr/hips-image-services/hips2fits#hips=CDS%2FP%2FDECaPS%2FDR2%2Fg&ra=119.51849999999999&dec=-27.298400000000004&fov=0.4&projection=TAN&format=fits'
+#url = 'https://alasky.cds.unistra.fr/hips-image-services/hips2fits?hips=CDS%2FP%2FDECaPS%2FDR2%2Fg&width=1200&height=900&fov=0.4&projection=TAN&coordsys=icrs&rotation_angle=0.0&ra=119.51849999999999&dec=-27.298400000000004&format=fits'
+#url = 'https://alasky.cds.unistra.fr/hips-image-services/hips2fits?hips=CDS%2FP%2FPanSTARRS%2FDR1%2Fg&width=1200&height=900&fov=0.4&projection=TAN&coordsys=icrs&rotation_angle=0.0&ra=48.837009&dec=41.611606&format=fits'
+#urllib.request.urlretrieve(url, 'test.fits')
+
+# hips2fits.timeout = 180
+
+# w = astropy_wcs.WCS(header={
+# 'NAXIS1': 4100, # Width of the output fits/image
+# 'NAXIS2': 4100, # Height of the output fits/image
+# 'WCSAXES': 2, # Number of coordinate axes
+# 'CRPIX1': 2050, # Pixel coordinate of reference point
+# 'CRPIX2': 2050, # Pixel coordinate of reference point
+# 'CDELT1': -0.000108056, # [deg] Coordinate increment at reference point
+# 'CDELT2': 0.000108056, # [deg] Coordinate increment at reference point
+# 'CUNIT1': 'deg', # Units of coordinate increment and value
+# 'CUNIT2': 'deg', # Units of coordinate increment and value
+# 'CTYPE1': 'RA---TAN', # galactic longitude, Mollweide's projection
+# 'CTYPE2': 'DEC--TAN', # galactic latitude, Mollweide's projection
+# 'CRVAL1': 48.837009, # [deg] Coordinate value at reference point
+# 'CRVAL2': 41.611606, # [deg] Coordinate value at reference point
+# })
+# hips = 'CDS/P/PanSTARRS/DR1/g'
+# result = hips2fits.query_with_wcs(
+# hips=hips,
+# wcs=w
+# )
+
+# result.writeto('test.fits', overwrite=True)
+
+def search_for_DECam(self, survey):
+
+    from . import generate_FOV_grid, LCO_INSTRUMENTS, SURVEYS, OUT_FOLDER
+
+    # Reference query (unused), kept from earlier notes:
+    # 'https://datalab.noirlab.edu/query/query?sql=" + survey_temp + "+q3c_radial_query(s_ra,+s_dec,+" + items[_i_ra] + ",+" + items[_i_dec] + ",+2)+)+SELECT+access_url,instrument_name,obs_bandpass,exptime,prodtype,proctype,date_obs+FROM+region+WHERE+(abs(spat_hilimit1+-+spat_lolimit1)+<+90.0+AND+("+items[_i_ra]+"+BETWEEN+spat_lolimit1+AND+spat_hilimit1)+AND+("+items[_i_dec]+"+BETWEEN+spat_lolimit2+AND+spat_hilimit2))+OR+(abs(spat_hilimit1+-+spat_lolimit1)+>+90.0+AND+(+("+items[_i_ra]+"+BETWEEN+0.0+AND+spat_lolimit1)+OR+("+items[_i_ra]+"+BETWEEN+spat_hilimit1+AND+360.0))+AND+("+items[_i_dec]+"+BETWEEN+spat_lolimit2+AND+spat_hilimit2))+ORDER+BY+date_obs+DESC&ofmt=csv&out=none&async=false";'
+
+
+ connect = sia.SIAService(DECAM_SERVICE)
+ table = connect.search(pos = (self.coord.ra.deg, self.coord.dec.deg), size = (LCO_INSTRUMENTS['sinistro'].fov.to(u.deg).value, LCO_INSTRUMENTS['sinistro'].fov.to(u.deg).value), verbosity=2).to_table()
+
+
+
+ for flt in self.filters:
+
+ sel = (table['prodtype'] == 'image') & (startswith(table['obs_bandpass'].astype(str), flt))
+
+ if len(table[sel]) != 0:
+ out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits'
+ urllib.request.urlretrieve(table[sel]['access_url'][0], out_name)
+ self.templates_paths[f'{survey}_{flt}'] = out_name
+ with pf.open(out_name, mode='update') as hdu:
+ hdu[0].header['MJD-OBS'] = hdu[0].header['MJD_MEAN']
+ hdu[0].header['DATE-OBS'] = hdu[0].header['DATEOBS']
+ hdu[0].header['DAY-OBS'] = hdu[0].header['DATE-OBS'].split('T')[0]
+ hdu[0].header['FILTER'] = flt+'p' #note, this works only with griz
+ hdu[0].header['OBJECT'] = self.obj
+ hdu[0].header['WCSERR'] = 0
+ hdu[0].header['RA'] = self.coord.ra.deg
+ hdu[0].header['DEC'] = self.coord.dec.deg
+
+ hdu.flush()
+ else:
+ print(f'Coordinates {self.coord.ra.deg} {self.coord.dec.deg} not in DECam with filter {flt}.')
\ No newline at end of file
diff --git a/gw/survey_queries/PanSTARRS_search.py b/gw/survey_queries/PanSTARRS_search.py
new file mode 100644
index 00000000..5b21416f
--- /dev/null
+++ b/gw/survey_queries/PanSTARRS_search.py
@@ -0,0 +1,220 @@
+# -*- coding: utf-8 -*-
+"""PanSTARRS_search.py
+
+"""
+
+from astropy.table import Table
+from astropy import units as u
+from astropy.time import Time
+from astropy.wcs import WCS
+import astropy.io.fits as pf
+import logging
+
+logger = logging.getLogger(__name__)
+
+# PS1 urls
+PS1_TAB = 'https://ps1images.stsci.edu/cgi-bin/ps1filenames.py'
+
+# PS1 provides a cutout API. We probably prefer to use the whole
+# skycell, but we can easily change that with this bool variable
+SKYCELL = True
+if SKYCELL:
+ PS1_CUT = 'https://ps1images.stsci.edu/'
+else:
+ PS1_CUT = 'https://ps1images.stsci.edu/cgi-bin/fitscut.cgi'
+
+
+def search_for_PS1(query, survey):
+ '''
+    This function searches for and downloads PS1 templates.
+    Together with the data, it will also fetch the weight images (i.e. 1/variance)
+    and the mask images.
+
+ '''
+
+ from . import generate_FOV_grid, LCO_INSTRUMENTS, SURVEYS
+
+ # Instruments are sorted by FOV size.
+ # :query.instruments[0]: is the instrument with the biggest FOV
+ # and therefore requires the biggest template. We start creating
+ # this, and instead of repeating the procedure, we just scale down
+ # this first template to match the smaller requirements.
+ inst = query.instruments[0]
+
+ # Generate grid of points covering the FOV
+ grid = generate_FOV_grid(_center = query.coord, _fov = LCO_INSTRUMENTS[inst].fov, _step=SURVEYS[survey].skycell_size)
+
+ # NOTE: wt are actually variance maps
+ data_lists = {'stack':{}, 'mask':{}, 'wt':{}}
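+    # Layout: data_lists[filetype][filter] holds the (trimmed) HDUs, one per
+    # PS1 skycell, e.g. data_lists['stack']['g'].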
+
+ while True:
+ # Initially :grid[0]: will be the center of the field.
+ # After each iteration, the :grid: array is trimmed down
+ # and its first element will be the point on the grid
+ # most distant from the center
+
+ # Creating the url to query the PS1 database.
+ # NOTE: PS1 needs the size in pixels, considering 0.25 arcsec/pixel.
+ # This first query returns a Table with a list of files
+ logger.info(f'Querying the PS1 database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.')
+ tableurl = f'{PS1_TAB}?ra={grid[0].ra.deg}&dec={grid[0].dec.deg}&size={LCO_INSTRUMENTS[inst].fov.to(u.arcsec).value/0.25}&format=fits&filters={query.filters}&type=stack,stack.wt,stack.mask'
+ table = Table.read(tableurl, format='ascii')
+
+ # If the table is empty, the images are not in PS1
+ if len(table) == 0:
+ logger.info(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the PS1 footprint.\n')
+ return
+
+ logger.info('Coordinates in PS1 footprint.')
+ for tab in table:
+ flt = tab['filter']
+ filename = tab['filename']
+ filetype = tab['type'].split('.')[-1]
+
+            # Initializing dictionaries
+            if flt not in data_lists[filetype]:
+                data_lists[filetype][flt] = []
+
+ if SKYCELL:
+ url = f'{PS1_CUT}{filename}'
+ else:
+ url = f'{PS1_CUT}?ra={grid[0].ra.deg}&dec={grid[0].dec.deg}&size=6000&format=fits&red={filename}'
+
+ logger.info(f'Retrieving file {filename}')
+ with pf.open(url) as hdulist:
+ # The skycell headers need some adjustments
+ if SKYCELL:
+ #hdulist[1].header['TIMESYS'] = 'TAI'
+ hdulist[1].header['RADESYSa'] = 'FK5'
+ hdulist[1].header['DATE-OBS'] = 'T'.join(Time(hdulist[1].header['MJD-OBS'], format='mjd', scale='utc').iso.split())
+ hdulist[1].header['PC1_1'] = hdulist[1].header['PC001001']
+ hdulist[1].header['PC1_2'] = hdulist[1].header['PC001002']
+ hdulist[1].header['PC2_1'] = hdulist[1].header['PC002001']
+ hdulist[1].header['PC2_2'] = hdulist[1].header['PC002002']
+ del hdulist[1].header['PC001001']
+ del hdulist[1].header['PC001002']
+ del hdulist[1].header['PC002001']
+ del hdulist[1].header['PC002002']
+
+ # The algorithm for decompressing fits file data
+ # doesn't work on PS1 skycells since astropy version 5.3
+                    # The easiest way to fix this is to remove the
+                    # 'ZBLANK' key (assuming it's equal to 'BLANK')
+ if 'ZBLANK' in hdulist[1].header:
+ del hdulist[1].header['ZBLANK']
+
+ # The image flux scaling is also non-standard for the stack images
+ if filetype != 'mask':
+ hdulist[1].data = hdulist[1].header['BOFFSET'] + hdulist[1].header['BSOFTEN'] * (10**(0.4*hdulist[1].data) - 10**(-0.4*hdulist[1].data))
+ del hdulist[1].header['BLANK']
+
+ # Getting rid of the edge nans
+ fixed_hdu = trim_nan(hdulist[1])
+ else:
+ fixed_hdu = trim_nan(hdulist[0])
+
+ del fixed_hdu.header['RADESYS']
+
+ # Since the weight images are actually variance maps, we need
+ # to take the reciprocal of it
+ if filetype == 'wt':
+ fixed_hdu.data = 1/fixed_hdu.data
+
+ data_lists[filetype][flt].append(fixed_hdu)
+
+ for skycell in data_lists['stack'][query.filters[0]]:
+ #trimming down the grid, leaving the points currently not in any skycell
+ grid = [p for p in grid if not p.contained_by(WCS(skycell.header))]
+
+ if len(grid) == 0:
+ break
+
+ return data_lists['stack'], data_lists['wt']
+
+
+def trim_nan(_hdu):
+    '''
+    When downloading an image from PS1, the image is padded with NaNs.
+    This function trims the image, removing the NaN padding while preserving
+    the WCS.
+
+    NOTE: the flux of the full skycell stack images is scaled. Astropy
+    rescales the data automatically as soon as the data attribute of the
+    hdu is accessed. See https://docs.astropy.org/en/stable/io/fits/usage/image.html
+    for more details.
+
+    *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+    :param _hdu: HDU containing the data to trim
+    :type _hdu: `~astropy.io.fits.ImageHDU`
+
+    *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+    :return: HDU with edges trimmed
+    :rtype: `~astropy.io.fits.PrimaryHDU`
+
+    '''
+
+ from astropy.nddata import Cutout2D
+ import numpy as np
+ import math
+
+    # Flagging which columns contain only np.nan (False means real data)
+    good_cols = np.all(np.isnan(_hdu.data),axis=1)
+
+ # Retrieving the first and the last index of good_cols == False,
+ # i.e. the first and last columns containing (at least partially)
+ # real data
+ for i in range(len(good_cols)):
+ if not good_cols[i]:
+ # Adding 10 pixel padding to account for edge artifact
+ first_col = i+10
+ break
+
+ for i in range(len(good_cols)-1,-1,-1):
+ if not good_cols[i]:
+            # We want to include the index, so we add 1, putting the
+            # edge right after the column. Subtracting 10 pixel padding
+            # to account for edge artifacts.
+ last_col = i+1-10
+ break
+
+
+ # Doing the same with the rows
+ good_rows = np.all(np.isnan(_hdu.data),axis=0)
+ for i in range(len(good_rows)):
+ if not good_rows[i]:
+ first_row = i+10
+ break
+
+ for i in range(len(good_rows)-1,-1,-1):
+ if not good_rows[i]:
+ last_row = i+1-10
+ break
+
+ size_x = last_col-first_col
+ size_y = last_row-first_row
+ cen_x = math.floor((last_col+first_col)/2.)
+ cen_y = math.floor((last_row+first_row)/2.)
+
+ # No idea why x and y are inverted between the center and the size
+ # ¯\_(ツ)_/¯
+ cut = Cutout2D(_hdu.data, (cen_y,cen_x), (size_x,size_y), wcs=WCS(_hdu.header))
+
+ out_hdu = pf.PrimaryHDU()
+ out_hdu.data = cut.data
+ out_hdu.header = _hdu.header.copy()
+ out_hdu.header.update(cut.wcs.to_header())
+
+ return out_hdu
+
+def track_skycells_in_header(_header, _data):
+ # Keeping track of the single skycells that are being combined.
+    _header['NSKYCELL'] = (len(_data), 'Number of PS1 skycells merged')
+    for i,im in enumerate(_data, start=1):
+        _header[f'STK_TY{i:>02}'] = (im.header['STK_TYPE'], f'type of stack {i}')
+        _header[f'STK_ID{i:>02}'] = (im.header['STK_ID'], f'ID of stack {i}')
+        _header[f'SKYCEL{i:>02}'] = (im.header['SKYCELL'], f'skycell of stack {i}')
+        _header[f'TES_ID{i:>02}'] = (im.header['TESS_ID'], f'tessellation ID of stack {i}')
+
+ return _header
\ No newline at end of file
diff --git a/gw/survey_queries/SDSS_search.py b/gw/survey_queries/SDSS_search.py
new file mode 100644
index 00000000..6605336d
--- /dev/null
+++ b/gw/survey_queries/SDSS_search.py
@@ -0,0 +1,182 @@
+# -*- coding: utf-8 -*-
+"""SDSS_search.py
+
+"""
+
+from astroquery.sdss import SDSS
+from astropy import units as u
+from astropy.time import Time
+from astropy.wcs import WCS
+import astropy.io.fits as pf
+import numpy as np
+import logging
+
+logger = logging.getLogger(__name__)
+
+def search_for_SDSS(self, survey):
+ '''
+    This function searches for and downloads SDSS templates.
+    Together with the data, it will also fetch the variance images
+
+ '''
+
+ # Instruments are sorted by FOV size.
+ # :self.instruments[0]: is the instrument with the biggest FOV
+ # and therefore requires the biggest template. We start creating
+ # this, and instead of repeating the procedure, we just scale down
+ # this first template to match the smaller requirements.
+
+ from . import generate_FOV_grid, LCO_INSTRUMENTS, SURVEYS
+
+ inst = self.instruments[0]
+
+ # Generate grid of points covering the FOV
+ grid = generate_FOV_grid(_center = self.coord, _fov = LCO_INSTRUMENTS[inst].fov, _step=SURVEYS[survey].skycell_size)
+
+ all_pointings = []
+
+ img_data_list = {}
+ weight_data_list = {}
+
+ for f,flt in enumerate(self.filters):
+
+ img_data_list[flt] = []
+ weight_data_list[flt] = []
+
+ # For the first filter, we need to collect the references for all the required SDSS fields
+ # We don't need to repeat the query for the following filters
+ # The SDSS fields references are stored in :all_pointings:
+ if f == 0:
+ while True:
+ # SDSS queries radius must be less than 3.0 arcmin.
+ # Initially :grid[0]: will be the center of the field.
+ # After each iteration, the :grid: array is trimmed down and its first element will be
+ # the point on the grid most distant from the center
+ logger.info(f'Querying the SDSS database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.')
+ xid = SDSS.query_region(grid[0], radius=3*u.arcmin, spectro=False)
+ if xid is None:
+ logger.info(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the SDSS footprint.\n')
+ return
+
+ logger.info('Coordinates in SDSS footprint.')
+ new_pointings = [list(el) for el in np.unique(xid['run','camcol','field']) if list(el) not in all_pointings]
+
+ all_pointings = all_pointings + new_pointings
+
+ for run, camcol, field in new_pointings:
+ #fetching the fits as an astropy hdu
+ logger.info(f'Retrieving file with run={run}, camcol={camcol}, field={field} and filter {flt}.')
+ im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0]
+
+ sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field)
+ img_data_list[flt].append(sky_subtracted_original_image_hdu)
+ weight_data_list[flt].append(weight_image_hdu)
+
+
+ for tile in img_data_list[flt]:
+ grid = [p for p in grid if not p.contained_by(WCS(tile.header))]
+
+ if len(grid) == 0:
+ break
+
+ else:
+ #Note that we changed the list to all_pointings now that we have them all
+ for run, camcol, field in all_pointings:
+ #fetching the fits as an astropy hdu
+ im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0]
+
+ sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field)
+ img_data_list[flt].append(sky_subtracted_original_image_hdu)
+ weight_data_list[flt].append(weight_image_hdu)
+
+ return img_data_list, weight_data_list
+
+
+
+def elaborate_SDSS(_im, _flt, _run, _camcol, _field):
+ '''
+ SDSS images require a little bit of manipulation.
+ This function will produce a ready to be used image
+ as well as its corresponding weight image.
+
+ *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+ :param _im: SDSS image HDUList
+ :type _im: `~astropy.io.fits.HDUList`
+
+ :param _flt: SDSS filter
+ :type _flt: string
+
+ :param _run: Run number
+ :type _run: int
+
+ :param _camcol: Column in the imaging camera
+ :type _camcol: int
+
+ :param _field: Field number
+ :type _field: int
+
+ *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+ :return: HDU of the image, HDU of the weight image
+ :rtype: list
+
+ '''
+
+ from scipy import interpolate
+
+ header = _im[0].header
+
+    #Although we do not write out each single tile, these edits to the header
+    #will make it easier to keep track of each tile in the merged fits file
+ header['RADESYSa'] = header['RADECSYS']
+ del header['RADECSYS']
+ header['MJD-OBS'] = Time(f"{header['DATE-OBS']} {header['TAIHMS']}", format='iso', scale='tai').mjd
+ header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split())
+ #For some reason the field info is not stored in the header. There is a "FRAME" key, but it's different
+ header['FIELD'] = _field
+
+ #retrieve gain and darkVariance from another online table
+ q = SDSS.query_sql(f'SELECT DISTINCT gain_{_flt}, darkVariance_{_flt} FROM field WHERE run={_run} AND camcol={_camcol}')
+ gain = q[f'gain_{_flt}'][0]
+ darkVariance = q[f'darkVariance_{_flt}'][0]
+
+ #im[2] contains the sky
+ sky, x, y = _im[2].data[0]
+ x_old = np.arange(np.shape(sky)[0])
+ y_old = np.arange(np.shape(sky)[1])
+
+    # `interp2d` is deprecated in SciPy 1.10 and will be removed in SciPy 1.14.0,
+    # so we use `interpolate.RegularGridInterpolator` instead.
+    # NOTE: `interp2d` has x and y inverted but doesn't need a `meshgrid` to produce the output.
+    # This is the code if `interp2d` is still preferred despite its deprecation:
+    # tck = interpolate.interp2d(y_old, x_old, sky, kind=method)
+    # sky_image = tck(x, y)
+
+ tck = interpolate.RegularGridInterpolator((x_old,y_old), sky, method="linear", bounds_error=False, fill_value = None)
+
+    # need to invert x and y to produce the meshgrid with the right shape
+ x_grid, y_grid = np.meshgrid(y, x, indexing='ij')
+
+ sky_image = tck((x_grid, y_grid)) #in ADU
+
+ #im[1] contains a row with factors nanomaggies per counts for each column
+ calib_image = np.asarray([_im[1].data.tolist()]*np.shape(_im[0].data)[0])
+
+ sky_subtracted_original_image = _im[0].data / calib_image #in ADU
+
+ variance_image = ((_im[0].data / calib_image + sky_image) / gain + darkVariance)# in ADU * calib_image **2
+
+ sky_subtracted_original_image_hdu = pf.PrimaryHDU(sky_subtracted_original_image, header=header)
+ #to reproject and combine, we just need the weight, not the variance
+ weight_image_hdu = pf.PrimaryHDU(1/variance_image, header=header)
+
+ return sky_subtracted_original_image_hdu, weight_image_hdu
+
+def track_skycells_in_header(_header, _data):
+ _header['NTILES'] = (len(_data), 'Number of SDSS tiles merged')
+ for i,im in enumerate(_data, start=1):
+ _header[f'RUN{i:>03}'] = (im.header['RUN'], f'Run number of tile {i}')
+ _header[f'CAMCO{i:>03}'] = (im.header['CAMCOL'], f'Column in the imaging camera of tile {i}')
+ _header[f'FIELD{i:>03}'] = (im.header['FIELD'], f'Field number of tile {i}')
+ _header[f'MJD{i:>03}'] = (im.header['MJD-OBS'], f'MJD of tile {i}')
+
+ return _header
\ No newline at end of file
diff --git a/gw/survey_queries/Skymapper_search.py b/gw/survey_queries/Skymapper_search.py
new file mode 100644
index 00000000..286625a5
--- /dev/null
+++ b/gw/survey_queries/Skymapper_search.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+"""Skymapper_search.py
+
+"""
+
+from astropy.io.votable import parse
+import astropy.io.fits as pf
+import urllib.request
+import io
+
+# Skymapper urls
+SKY_CUT = 'https://api.skymapper.nci.org.au/public/siap/dr4/query'
+
+def search_for_Skymapper(self, survey):
+ '''
+    Skymapper has the ability to search for multiple filters in one go,
+    but the returned list of files loses the information on the filter,
+    so a separate query is sent for each filter.
+    The biggest cutout size for Skymapper is 0.17 deg
+
+ '''
+    from . import OUT_FOLDER
+
+    for flt in self.filters:
+
+ url = f'{SKY_CUT}?POS={self.coord.ra.deg},{self.coord.dec.deg}&SIZE=0.17,0.17&BAND={flt}&FORMAT=image/fits&INTERSECT=covers&RESPONSEFORMAT=VOTABLE'
+
+
+        response = urllib.request.urlopen(url)
+        s = io.BytesIO(response.read())
+        votable = parse(s)
+
+ for resource in votable.resources:
+ for t in resource.tables:
+ table = t.array['get_fits'][:]
+
+ if len(table) != 0:
+ out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits'
+ urllib.request.urlretrieve(table[0], out_name)
+ self.templates_paths[f'{survey}_{flt}'] = out_name
+ with pf.open(out_name, mode='update') as hdu:
+ hdu[0].header['DAY-OBS'] = hdu[0].header['DATE-OBS'].split('T')[0]
+ hdu[0].header['FILTER'] = flt+'p' #note, this works only with griz
+ hdu[0].header['OBJECT'] = self.obj
+ hdu[0].header['WCSERR'] = 0
+ hdu[0].header['RA'] = self.coord.ra.deg
+ hdu[0].header['DEC'] = self.coord.dec.deg
+
+ hdu.flush()
+
+ else:
+ print(f'Coordinates {self.coord.ra.deg} {self.coord.dec.deg} not in Skymapper with filter {flt}.')
\ No newline at end of file
diff --git a/gw/survey_queries/__init__.py b/gw/survey_queries/__init__.py
new file mode 100644
index 00000000..f0935f62
--- /dev/null
+++ b/gw/survey_queries/__init__.py
@@ -0,0 +1,146 @@
+from astropy import units as u
+import os
+import numpy as np
+
+from . import PanSTARRS_search
+from . import SDSS_search
+
+# Folder definitions
+OUT_FOLDER = os.getenv('GWTEMPLATES_FOLDER')
+#OUT_FOLDER = '/supernova/data/extdata/GW_templates/O4'
+
+class Instrument:
+ def __init__(self, _fov, _resolution):
+ self.fov = _fov
+ self.resolution = _resolution
+
+class Survey:
+ def __init__(self, _telescope, _telescopeid, _instrument, _instrumentid, _skycell_size, _track_skycells_in_header):
+ self.telescope = _telescope
+ self.telescopeid = _telescopeid
+ self.instrument = _instrument
+ self.instrumentid = _instrumentid
+ self.skycell_size = _skycell_size
+ self.track_skycells_in_header = _track_skycells_in_header
+
+# Specifications for different instruments
+# The FOV here is taken as the diameter of a circle that would circumscribe the LCO image.
+# This is effectively bigger than the actual FOV of each instrument, but it will guarantee
+# that the full FOV is covered given any rotation
+LCO_INSTRUMENTS = {
+ 'sinistro': Instrument(_fov = 26.5*u.arcmin, _resolution = 0.389*u.arcsec),
+ 'muscat' : Instrument(_fov = 9.1*u.arcmin, _resolution = 0.27*u.arcsec),
+ # The FOV of 'qhy' is rectangular, so we calculate the side of a square having the same diagonal.
+ 'qhy' : Instrument(_fov = np.sqrt((1.9**2 + 1.2**2)/2)*u.deg, _resolution = 0.74*u.arcsec)
+}
+
+SURVEYS ={
+ 'PS1': Survey(_telescope = 'PS1', _telescopeid = 41, _instrument = 'GPC1', _instrumentid = 191, _skycell_size = 0.4*u.deg, _track_skycells_in_header = PanSTARRS_search.track_skycells_in_header),
+ 'SDSS': Survey(_telescope = 'APO 2.5m', _telescopeid = 139, _instrument = 'SDSS', _instrumentid = 42, _skycell_size = 10*u.arcmin, _track_skycells_in_header = SDSS_search.track_skycells_in_header)
+}
+
+def generate_FOV_grid(_center, _fov, _step, circle=True, wcs = None):
+ '''
+    The function generates a grid of points, each a distance :_step: from its neighbors.
+    The construction follows recursive hexagonal rings that become bigger and
+    bigger until they encompass the whole FOV. It takes advantage of the class
+    `~astropy.visualization.wcsaxes.SphericalCircle`, which creates a circle
+    formed of all the points that are within a certain angle of the central
+    coordinates on a sphere. Each hexagonal ring is built on top of these circles.
+
+    If :_step: is bigger than half of the FOV size, the grid would only contain
+    the central point, and the FOV might not be fully covered by the
+    template tiles. In that case the grid will instead be the central
+    point plus an octagon with a diagonal equal to half of the diagonal of the
+    FOV. The octagon is chosen to account for any rotation of the FOV.
+
+    If :circle: is True (default), the grid will be trimmed to a circle with radius
+    equal to :_fov:
+
+ If :wcs: is provided, the points outside of the provided WCS are removed.
+
+ *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+ :param _center: Coordinates of the center point of the grid
+ :type _center: `~astropy.units.Quantity` ['angle']
+
+ :param _fov: size of the FOV to tile with the grid. This assumes a square
+ FOV. This quantity effectively translates to the length of the
+ apothem of outermost hexagon, which will be equal to half of
+ the diagonal of the square FOV.
+ :type _fov: `~astropy.units.Quantity` ['angle']
+
+    :param _step: distance between adjacent points in the grid
+    :type _step: `~astropy.units.Quantity` ['angle']
+
+ :param circle: If True, trims the grid to a circle with radius :_fov:
+ :type circle: bool
+ :default: True
+
+ :param wcs: WCS information. If provided the points outside of it will
+ be removed
+ :type wcs: `~astropy.wcs.WCS` or None
+ :default wcs: None
+
+ *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+ :return: List of points of the grid. Each element is a astropy SkyCoord
+ :rtype: list
+
+ '''
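+    # Example (hypothetical values): a grid covering a sinistro FOV, stepped
+    # by the PS1 skycell size:
+    #     generate_FOV_grid(_center=SkyCoord(150*u.deg, 2*u.deg),
+    #                       _fov=26.5*u.arcmin, _step=0.4*u.deg)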
+
+ from astropy.visualization.wcsaxes import SphericalCircle
+ from astropy.coordinates import SkyCoord
+ import math
+
+ points = [[_center, 0*u.deg]]
+
+    # If the step is too big, we will take an octagonal ring
+ if _step > _fov/2:
+ vertexes = SphericalCircle(_center, radius = _fov*np.sqrt(2)/2, resolution=8).get_verts()
+ # :SphericalCircle: will produce a Polygon, so the first and the last
+ # point will coincide. Therefore we need to remove the last vertex.
+ for v in vertexes[:-1]:
+ p = SkyCoord(*v, unit=u.deg)
+ points.append([p, p.separation(_center)])
+
+ else:
+        # A square FOV of side A has a diagonal of A*sqrt(2). Therefore a
+        # hexagon will have to have a diagonal of A*sqrt(2)/sqrt(3) in order to
+        # contain it all, given any rotation. We add an extra ring just to be sure
+
+ for ring in range(1, math.ceil(_fov/_step*np.sqrt(2)/np.sqrt(3))+1):
+ current_hexagon = []
+ vertexes = SphericalCircle(_center, radius = _step*ring, resolution=6).get_verts()
+ for v in vertexes:
+ p = SkyCoord(*v, unit=(u.deg,u.deg))
+ current_hexagon.append([p, p.separation(_center)])
+ #Filling the sides of the hexagon with points
+ for side in range(6):
+ # We need to split the side on the nth hexagon
+ # in n segments
+ for i in range(1, ring):
+ a = i / ring # rescale 0 < i < n --> 0 < a < 1
+ new_point = (1 - a) * current_hexagon[side][0].cartesian + a * current_hexagon[side+1][0].cartesian # interpolate cartesian coordinates
+ new_point = SkyCoord(new_point)
+ current_hexagon.append([new_point, new_point.separation(_center)])
+ # :SphericalCircle: will produce a Polygon, so the first and the last
+ # point will coincide. Therefore we need to remove the last vertex of
+ # the current hexagon.
+ current_hexagon.pop(6)
+ points = points + current_hexagon
+
+ if circle:
+ #adding 5 arcsec to give an extra buffer
+ points = [p for p in points if p[1] <= (_fov/2*np.sqrt(2))+5*u.arcsec]
+
+ if wcs:
+ points = [p for p in points if p[0].contained_by(wcs)]
+
+ # Sorting the list according to the distance, with the most distant points first
+ points.sort(key = lambda x: x[1], reverse=True)
+
+ #Putting the center back at the beginning
+ points = [points.pop()] + points
+
+ #Returning just the list of coordinates, without the distance
+ return [p[0] for p in points]
\ No newline at end of file
diff --git a/gw/survey_queries/query.py b/gw/survey_queries/query.py
new file mode 100644
index 00000000..eb3fa81d
--- /dev/null
+++ b/gw/survey_queries/query.py
@@ -0,0 +1,194 @@
+# -*- coding: utf-8 -*-
+"""survey_queries.py
+
+"""
+
+from astropy.coordinates import SkyCoord
+from astropy import units as u
+from astropy.time import Time
+from astropy.wcs import WCS
+import astropy.io.fits as pf
+import numpy as np
+import logging
+
+logger = logging.getLogger(__name__)
+
+from . import PanSTARRS_search, SDSS_search, DECam_search, generate_FOV_grid, LCO_INSTRUMENTS, SURVEYS, OUT_FOLDER
+
+class template_query:
+ '''
+    This class will query the surveys looking for optical templates.
+    Each search method is associated with a survey. It first checks whether
+    the coordinates are in the footprint of that survey; if they are, the
+    templates are downloaded, combined, and the paths of the resulting
+    files are stored in the :templates_paths: dictionary.
+
+ '''
+
+ def __init__(self, _obj, _ra, _dec, _filters, _inst):
+ '''
+ Initializing the class with the name of the object :_obj:,
+ the coordinates :_ra: and :_dec: and the filters :_filters: to
+ search.
+ The template images will be resampled to match the resolution
+ of the instrument (or list of instruments) provided by :_inst:.
+ The template FOV will also be as big as to include the whole
+ FOV of that instrument.
+ The empty dictionary :templates_paths: is also created.
+
+ *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+
+ :_obj: Name of the object, mainly for file naming purposes
+ :type _obj: string
+
+ :_ra: RA coordinates of the object in deg, corresponding to
+ the center of the search
+ :type _ra: float
+
+ :_dec: Dec coordinates of the object in deg, corresponding to
+ the center of the search
+ :type _dec: float
+
+ :_filters: filters to search. If searching for multiple filters
+ use a single long string, e.g. 'gri'
+ :type _filters: string
+
+ :_inst: instrument name, or list of instrument names. In order
+ to be valid, the name will be checked if it is in the
+ dictionary :LCO_INSTRUMENTS:, defined above.
+ :type _inst: string or list
+
+ '''
+
+ self.obj = _obj
+ self.coord = SkyCoord(_ra, _dec, unit=(u.deg, u.deg))
+ self.filters = _filters
+
+ # Making the :instruments: list and checking the instruments exist.
+ if isinstance(_inst, str):
+ if _inst == 'all':
+ self.instruments = list(LCO_INSTRUMENTS.keys())
+ else:
+ self.instruments = [_inst]
+ else:
+ self.instruments = _inst
+ for i in self.instruments:
+ if i not in LCO_INSTRUMENTS.keys():
+                raise ValueError(f'Instrument `{i}` does not appear to be currently supported. If this is a new instrument, you will need to include it in `LCO_INSTRUMENTS`.')
+
+ # Sorting the instruments in decreasing FOV size.
+ self.instruments.sort(key = lambda x: LCO_INSTRUMENTS[x].fov, reverse=True)
+
+ self.templates_paths = {}
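+        # Keys of :templates_paths: have the form f'{inst}_{survey}_{flt}',
+        # e.g. 'sinistro_PS1_g', mapping to the path of the combined template
+        # ('--' until a search succeeds).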
+
+
+ # Defining a decorator. Because decorators are cool
+ def set_survey(_survey):
+ '''
+        This decorator sets the name of the survey for the decorated function, through
+        the :_survey: variable, and initializes the :self.templates_paths: dictionary
+        with a '--' string for that specific survey. The idea is that by initializing
+        the dictionary, there will be a record that the search has been performed.
+        Otherwise the dictionary will not have any entry for that survey.
+
+        *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+        :_survey: Name of the survey
+        :type _survey: string
+
+ *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+ :return: Decorated function
+ :rtype: function
+
+ '''
+ def decorate(function):
+ def initialize_survey(self):
+ # Initializing all the filters
+ for flt in self.filters:
+ for inst in self.instruments:
+ self.templates_paths[f'{inst}_{_survey}_{flt}'] = '--'
+ logger.info(f'Searching for {_survey} templates.\n')
+ return function(self, _survey)
+ return initialize_survey
+ return decorate
+
+ @set_survey('PS1')
+ def search_for_PS1(self, survey):
+ data, weights = PanSTARRS_search.search_for_PS1(self, survey)
+ self.make_templates(_data = data, _weights = weights, _survey = survey)
+
+ @set_survey('SDSS')
+ def search_for_SDSS(self, survey):
+ data, weights = SDSS_search.search_for_SDSS(self, survey)
+ self.make_templates(_data = data, _weights = weights, _survey = survey)
+
+ @set_survey('DECam')
+ def search_for_DECam(self, survey):
+ data, weights = DECam_search.search_for_DECam(self, survey)
+ self.make_templates(_data = data, _weights = weights, _survey = survey)
+
+
+ def make_templates(self, _data, _weights, _survey):
+
+ from reproject.mosaicking import find_optimal_celestial_wcs
+ from reproject import reproject_interp
+ from reproject.mosaicking import reproject_and_coadd
+
+ # We now trim the list of skycells according to each instrument FOV
+ # For the first instrument this step should be redundant since we
+ # selected the skycell in order to tile the FOV already.
+ for inst in self.instruments:
+ new_grid = generate_FOV_grid(_center = self.coord, _fov = LCO_INSTRUMENTS[inst].fov, _step=SURVEYS[_survey].skycell_size)
+ for flt in _data:
+                # Since enumerate() returns an iterator and iterators can't be
+                # reversed, we need to convert it to a list first
+ for skycell_idx, skycell in reversed(list(enumerate(_data[flt]))):
+ contained = False
+ for p in new_grid:
+ if p.contained_by(WCS(skycell.header)):
+ contained = True
+ break
+ # If no point of the grid is contained by the skycell,
+ # drop it from all the data lists
+ if not contained:
+ _data[flt].pop(skycell_idx)
+ _weights[flt].pop(skycell_idx)
+
+            logger.info(f'Reprojecting and combining skycells of filter {flt} for {inst}.')
+
+ wcs_out, shape_out = find_optimal_celestial_wcs(_data[flt], resolution=LCO_INSTRUMENTS[inst].resolution)
+ array, footprint = reproject_and_coadd(_data[flt], wcs_out, shape_out=shape_out, reproject_function=reproject_interp, input_weights = _weights[flt], combine_function='mean')
+ header = wcs_out.to_header()
+
+ header = SURVEYS[_survey].track_skycells_in_header(_header = header, _data = _data[flt])
+
+            # Populating the header with the keys needed by the pipeline
+ header['RA'] = self.coord.ra.deg
+ header['DEC'] = self.coord.dec.deg
+ header['CAT-RA'] = self.coord.ra.deg
+ header['CAT-DEC'] = self.coord.dec.deg
+ header['MJD-OBS'] = (np.mean([im.header['MJD-OBS'] for im in _data[flt]]), 'Average MJD')
+ header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split())
+ header['DAY-OBS'] = header['DATE-OBS'].split('T')[0].replace('-', '')
+ header['TELESCOP'] = SURVEYS[_survey].telescope
+ header['INSTRUME'] = SURVEYS[_survey].instrument
+ header['SURVEY'] = _survey
+ header['OBJECT'] = self.obj
+ header['FILTER'] = flt+'p' # NOTE: this works only with griz
+ header['AIRMASS'] = 1
+ header['WCSERR'] = 0
+ header['PIXSCALE'] = (LCO_INSTRUMENTS[inst].resolution.value, 'arcsec/pixel')
+ header['BUNIT'] = ('counts', 'ADUs')
+
+ # Writing out the final combined template
+ out_name = f'{OUT_FOLDER}/{_survey}/{inst}_{self.obj}_{flt}'
+ self.templates_paths[f'{inst}_{_survey}_{flt}'] = out_name+'.fits'
+ pf.writeto(out_name+'.fits', array, header, overwrite=True)
+ logger.info(f'{out_name}.fits written out.')
+
+ # Hotpants needs the variance, so we need to write out 1/footprint
+ header['BUNIT'] = ('counts2', 'ADUs^2')
+ pf.writeto(out_name+'.var.fits', 1/footprint, header, overwrite=True)
+ logger.info(f'{out_name}.var.fits written out.')
+
+
diff --git a/gw/templatetags/gw_tags.py b/gw/templatetags/gw_tags.py
index 22e6dc16..5cf5115f 100644
--- a/gw/templatetags/gw_tags.py
+++ b/gw/templatetags/gw_tags.py
@@ -80,8 +80,11 @@ def galaxy_distribution(context, galaxies):
return {'figure': figure}
-@register.inclusion_tag('gw/plot_triplets.html', takes_context=True)
-def plot_triplets(context, triplet, galaxy, display_type):
+@register.inclusion_tag('gw/plot_triplets.html')#, takes_context=True)
+def plot_triplets(triplet, galaxy, display_type):
+
+ #This can be galaxy sizes times some factor.
+ SIZE = 0.9/60 #deg
plot_context = {}
@@ -95,10 +98,10 @@ def plot_triplets(context, triplet, galaxy, display_type):
if display_type == 'list':
###TODO: Change this:
- galaxy_coord = SkyCoord(228.691875, 31.223633, unit='deg')#galaxy.ra, galaxy.dec, unit='deg')
+ #galaxy_coord = SkyCoord(228.691875, 31.223633, unit='deg')#galaxy.ra, galaxy.dec, unit='deg')
#galaxy_pix_ra, galaxy_pix_dec = skycoord_to_pixel(galaxy_coord, wcs)
- img_coord_lower = SkyCoord(228.691875-0.9/60, 31.223633-0.9/60, unit='deg')
- img_coord_upper = SkyCoord(228.691875+0.9/60, 31.223633+0.9/60, unit='deg')
+ img_coord_lower = SkyCoord(galaxy.ra-SIZE, galaxy.dec-SIZE, unit='deg')
+ img_coord_upper = SkyCoord(galaxy.ra+SIZE, galaxy.dec+SIZE, unit='deg')
img_pixel_upper_ra, img_pixel_lower_dec = skycoord_to_pixel(img_coord_lower, wcs)
img_pixel_lower_ra, img_pixel_upper_dec = skycoord_to_pixel(img_coord_upper, wcs)
diff --git a/gw/tests.py b/gw/tests.py
deleted file mode 100644
index 7ce503c2..00000000
--- a/gw/tests.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from django.test import TestCase
-
-# Create your tests here.
diff --git a/gw/views.py b/gw/views.py
index 8f9d9318..d7e6d276 100644
--- a/gw/views.py
+++ b/gw/views.py
@@ -14,16 +14,17 @@
import sep
from datetime import datetime, timedelta
from tom_nonlocalizedevents.models import NonLocalizedEvent, EventSequence, EventLocalization
-from gw.models import GWFollowupGalaxy
-from gw.forms import GWGalaxyObservationForm
-from gw.treasure_map_utils import build_tm_pointings, submit_tm_pointings
+from .models import GWFollowupGalaxy
+from .forms import GWGalaxyObservationForm
+from .treasure_map_utils import build_tm_pointings, submit_tm_pointings
from tom_common.hooks import run_hook
from tom_targets.models import Target, TargetExtra
from tom_observations.facility import get_service_class
from tom_observations.models import ObservationRecord, ObservationGroup, DynamicCadence
-from custom_code.hooks import _return_session
+from custom_code.hooks import _return_session, _load_table
from custom_code.views import Snex1ConnectionError
import logging
+from .run_template_search import search_templates_and_update_snex1
logger = logging.getLogger(__name__)
@@ -61,6 +62,13 @@ class EventSequenceGalaxiesTripletView(TemplateView, LoginRequiredMixin):
template_name = 'gw/galaxy_observations.html'
def get_context_data(self, **kwargs):
+
+ db_session = _return_session(settings.SNEX1_DB_URL)
+
+ o4_galaxies = _load_table('o4_galaxies', db_address = settings.SNEX1_DB_URL)
+ photlco = _load_table('photlco', db_address = settings.SNEX1_DB_URL)
+
+
context = super().get_context_data(**kwargs)
sequence = EventSequence.objects.get(id=self.kwargs['id'])
@@ -69,42 +77,52 @@ def get_context_data(self, **kwargs):
galaxies = GWFollowupGalaxy.objects.filter(eventlocalization=loc)
galaxies = galaxies.annotate(name=F("id"))
context['galaxy_count'] = len(galaxies)
- #TODO: Filter galaxies by observations, but for now we'll just take a subset and fake it
context['superevent_id'] = sequence.nonlocalizedevent.event_id
context['superevent_index'] = sequence.nonlocalizedevent.id
+ existing_observations = db_session.query(photlco).filter(photlco.targetid==o4_galaxies.targetid).filter(o4_galaxies.event_id == sequence.nonlocalizedevent.event_id)
+
+ triplets=[]
+
+ for t in existing_observations:
+ if t.filetype==3:
+
+ diff_file = os.path.join(t.filepath, t.filename)
+
+ temp_filename = fits.getheader(diff_file)['TEMPLATE']
+ temp_filepath = [el.filepath for el in existing_observations if el.filename==temp_filename][0]
+
+ triplet={
+ #'galaxy': galaxy,
+ 'obsdate': t.dateobs,
+ 'filter': t.filter,
+ 'exposure_time': t.exptime,
+ 'original': {'filename': '.'.join(diff_file.split('.')[:-3])+'.fits'},
+ 'template': {'filename': os.path.join(temp_filepath, temp_filename)},
+ 'diff': {'filename': diff_file}
+ }
+
+ triplets.append(triplet)
+
rows = []
- #TODO: Populate this dynamically
- for galaxy in galaxies[:1]:
-
- row = {'galaxy': galaxy,
- 'triplets': [{
- 'obsdate': '2023-04-19',
- 'filter': 'g',
- 'exposure_time': 200,
- 'original': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/obs.fits')},
- 'template': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/ref.fits')},
- 'diff': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/sub.fits')}
- },
- #{
- # 'obsdate': '2023-04-19',
- # 'filter': 'g',
- # 'exposure_time': 200,
- # 'original': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/obs.fits')},
- # 'template': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/ref.fits')},
- # 'diff': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/sub.fits')}
- #}
- ]
- }
+        for galaxy in galaxies:
+            if len(triplets) != 0:
+                row = {
+                    'galaxy': galaxy,
+                    'triplets': triplets
+                }
+                rows.append(row)
context['rows'] = rows
return context
-
+# This view is not yet implemented
class GWFollowupGalaxyTripletView(TemplateView, LoginRequiredMixin):
template_name = 'gw/galaxy_observations_individual.html'
@@ -157,6 +175,9 @@ def submit_galaxy_observations_view(request):
db_session = _return_session()
failed_obs = []
all_pointings = []
+
+ snex1_targets = []
+
with transaction.atomic():
for galaxy in galaxies:
newtarget, created = Target.objects.get_or_create(
@@ -166,6 +187,8 @@ def submit_galaxy_observations_view(request):
type='SIDEREAL'
)
+ snex1_targets.append([newtarget.id, galaxy.eventlocalization.nonlocalizedevent.event_id])
+
if created:
gw = Group.objects.get(name='GWO4')
assign_perm('tom_targets.view_target', gw, newtarget)
@@ -295,16 +318,18 @@ def submit_galaxy_observations_view(request):
record.parameters['name'] = snex_id
record.save()
- ### Log the target in SNEx1 and ingest template images
- run_hook('ingest_gw_galaxy_into_snex1',
- newtarget.id,
- galaxy.eventlocalization.nonlocalizedevent.event_id,
- wrapped_session=db_session)
-
### Submit pointing to TreasureMap
#pointings = build_tm_pointings(newtarget, observing_parameters)
#all_pointings += pointings
+
+        ### Log the target in SNEx1, ingest template images,
+        ### and download templates.
+        ### This runs asynchronously in a dramatiq worker, so the arguments
+        ### must be serializable; the worker opens its own database session.
+        search_templates_and_update_snex1.send(snex1_targets, None)
+
+
#submitted = submit_tm_pointings(galaxy.eventlocalization.sequences.first(), all_pointings)
#if not submitted:
diff --git a/requirements.txt b/requirements.txt
index 3e714e4c..aa9c68c4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,6 +8,8 @@ gunicorn[gevent]
django==4.2
django-heroku
django-storages
+django_dramatiq
+redis
boto3
requests_oauthlib
python-magic
diff --git a/snex2/settings.py b/snex2/settings.py
index d88b3845..cf7e8678 100644
--- a/snex2/settings.py
+++ b/snex2/settings.py
@@ -44,6 +44,7 @@
'django.contrib.staticfiles',
'django.contrib.sites',
'django_extensions',
+ 'django_dramatiq',
'guardian',
'tom_common',
'django_comments',
@@ -286,7 +287,7 @@
FACILITIES = {
'LCO': {
'portal_url': 'https://observe.lco.global',
- 'api_key': os.environ['LCO_APIKEY'],
+ 'api_key': os.getenv('LCO_APIKEY'),
},
'GEM': {
'portal_url': {
@@ -549,6 +550,21 @@
}
]
+DRAMATIQ_BROKER = {
+ "BROKER": "dramatiq.brokers.redis.RedisBroker",
+ "OPTIONS": {
+ "url": "redis://127.0.0.1:6379"
+ },
+ "MIDDLEWARE": [
+ "dramatiq.middleware.AgeLimit",
+ "dramatiq.middleware.TimeLimit",
+ "dramatiq.middleware.Callbacks",
+ "dramatiq.middleware.Retries",
+ "django_dramatiq.middleware.DbConnectionsMiddleware",
+ ]
+}
+DRAMATIQ_AUTODISCOVER_MODULES = ["run_template_search"]
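+# Note: workers run separately from the web process; with django_dramatiq they
+# can be started with `python manage.py rundramatiq`.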
+
if DEBUG:
INTERNAL_IPS = [
'127.0.0.1',