From f4ead6f03a4ae9a52a3b0c9dd60fde2d7886f971 Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Thu, 7 Mar 2024 16:10:29 -0800 Subject: [PATCH 01/21] something --- .gitignore | 1 + custom_code/facilities/gemini_facility.py | 170 +++++++++++++++------- docker-compose.yml | 71 +++++++++ 3 files changed, 192 insertions(+), 50 deletions(-) diff --git a/.gitignore b/.gitignore index 06b6c7db..8eb88946 100644 --- a/.gitignore +++ b/.gitignore @@ -111,3 +111,4 @@ custom_code/brokers/queries/queries.json gw/gw_config.ini db_data +galaxy_database \ No newline at end of file diff --git a/custom_code/facilities/gemini_facility.py b/custom_code/facilities/gemini_facility.py index ccfd3a0a..40c46229 100644 --- a/custom_code/facilities/gemini_facility.py +++ b/custom_code/facilities/gemini_facility.py @@ -39,24 +39,53 @@ def get_site_code_from_program(program_id): class OpticalImagingForm(BaseObservationForm): + north_south_choice = ( + ('north', 'North'), + ('south', 'South'), + ) + n_or_s = forms.ChoiceField(choices=north_south_choice, initial='north', widget=forms.Select(), required=True, + label='') + window_size = forms.FloatField(initial=1.0, min_value=0.0, label='') max_airmass = forms.FloatField(min_value=1.0, max_value=5.0, initial=1.6, label='') # Optical imaging - optical_phot_exptime_choices = ( - (0, 0.0), - (100, 100.0), - (200, 200.0), - (300, 300.0), - (450, 450.0), - (600, 600.0), - (900, 900.0), +# optical_phot_exptime_choices = ( +# (0, 0.0), +# (100, 100.0), +# (200, 200.0), +# (300, 300.0), +# (450, 450.0), +# (600, 600.0), +# (900, 900.0), +# ) +# +# g_exptime = forms.ChoiceField(choices=optical_phot_exptime_choices, initial=0, widget=forms.Select(), required=True, label='') +# r_exptime = forms.ChoiceField(choices=optical_phot_exptime_choices, initial=0, widget=forms.Select(), required=True, label='') +# i_exptime = forms.ChoiceField(choices=optical_phot_exptime_choices, initial=0, widget=forms.Select(), required=True, label='') +# z_exptime = forms.ChoiceField(choices=optical_phot_exptime_choices, initial=0, widget=forms.Select(), required=True, label='') + + g_exptime = forms.FloatField(initial=0.0, min_value=0.0, + required=True, label='') + r_exptime = forms.FloatField(initial=0.0, min_value=0.0, + required=True, label='') + i_exptime = forms.FloatField(initial=0.0, min_value=0.0, + required=True, label='') + z_exptime = forms.FloatField(initial=0.0, min_value=0.0, + required=True, label='') + + filter_choices = ( + ('U','U'), + ('B','B'), + ('g','g'), + ('V','V'), + ('r','r'), + ('i','i'), ) - g_exptime = forms.ChoiceField(choices=optical_phot_exptime_choices, initial=0, widget=forms.Select(), required=True, label='') - r_exptime = forms.ChoiceField(choices=optical_phot_exptime_choices, initial=0, widget=forms.Select(), required=True, label='') - i_exptime = forms.ChoiceField(choices=optical_phot_exptime_choices, initial=0, widget=forms.Select(), required=True, label='') - z_exptime = forms.ChoiceField(choices=optical_phot_exptime_choices, initial=0, widget=forms.Select(), required=True, label='') + mag_approx = forms.FloatField(initial=18.0, min_value=0.0, label='') + mag_approx_filter = forms.ChoiceField(choices=filter_choices, initial='g', widget=forms.Select(), required=True, + label='') def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -72,6 +101,12 @@ def __init__(self, *args, **kwargs): PrependedText( 'max_airmass', 'Airmass <' ), + Div( + Div(PrependedText('mag_approx', 'Approximate mag: '),css_class='col-md-8'), + Div(HTML('
Filter:
'),css_class='col-md-2'), + Div('mag_approx_filter', css_class='col-md-2'), + css_class='form-row' + ), Div( Div(HTML('
Filter
'),css_class='col-md-2'), Div(HTML('
Exposure time (s)
'),css_class='col-md-10'), css_class='form-row', @@ -92,6 +127,13 @@ def __init__(self, *args, **kwargs): Div(HTML('
z
'),css_class='col-md-2'), Div('z_exptime',css_class='col-md-10'), css_class='form-row' ), + Div( + Div(HTML('
Status
'), css_class='col-md-12'), + css_class='form-row', + ), + Div( + Div('ready', css_class='col-md-12'), css_class='form-row' + ), ), css_class='col-md-8' ), css_class='row justify-content-md-center' ) @@ -108,13 +150,21 @@ def is_valid(self): def _init_observation_payload(self, target): - wait = True #On Hold + #wait = True #On Hold coords = SkyCoord(ra=target.ra*u.degree, dec=target.dec*u.degree) now = datetime.utcnow() sn_name = target.name + + if self.data['n_or_s'] == 'north': + prog = os.getenv('GEMINI_NORTH_PROGRAMID') + pwd = os.getenv('GEMINI_NORTH_PASSWORD') + elif self.data['n_or_s'] == 'south': + prog = os.getenv('GEMINI_SOUTH_PROGRAMID') + pwd = os.getenv('GEMINI_SOUTH_PASSWORD') payload = { - 'ready': str(not wait).lower(), + #'ready': str(not wait).lower(), + 'ready': self.data['ready'], 'prog': os.getenv('GEMINI_PROGRAMID',''), 'email': os.getenv('GEMINI_EMAIL',''), 'password': os.getenv('GEMINI_PASSWORD',''), @@ -128,7 +178,7 @@ def _init_observation_payload(self, target): 'elevationType': 'airmass', 'elevationMin': 1.0, 'elevationMax': str(self.data['max_airmass']).strip(), - 'note': 'API Test', + 'note': 'This observation was submitted through the API, if anything is unusual contact cmccully@lco.global', 'posangle': 90. } @@ -174,12 +224,12 @@ class OpticalSpectraForm(BaseObservationForm): window_size = forms.FloatField(initial=1.0, min_value=0.0, label='') max_airmass = forms.FloatField(min_value=1.0, max_value=5.0, initial=1.6, label='') - optical_spec_exptime_choices = ( - (0, 0.0), - (1200, 1200.0), - (1500, 1500.0), - (1700, 1700.0), - ) +# optical_spec_exptime_choices = ( +# (0, 0.0), +# (1200, 1200.0), +# (1500, 1500.0), +# (1700, 1700.0), +# ) optical_spec_slit_choices = ( (1, '1.0"'), @@ -248,7 +298,7 @@ def __init__(self, *args, **kwargs): css_class='form-row', ), Div( - Div(HTML('
B600/500nm
'), css_class='col-md-4'), + Div(HTML('
B480/500nm
'), css_class='col-md-4'), Div('b_exptime', css_class='col-md-8'), css_class='form-row' ), Div( @@ -283,7 +333,7 @@ def is_valid(self): return not errors def _init_observation_payload(self, target): - wait = True #On Hold + #wait = True #On Hold coords = SkyCoord(ra=target.ra*u.degree, dec=target.dec*u.degree) now = datetime.utcnow() sn_name = target.name @@ -324,55 +374,75 @@ def observation_payload(self): if self.data['n_or_s'] == 'north': if self.data['slit'] == '1.5': obsid_map = { - 'B600': { - 'arc': '68', - 'acquisition': '66', - 'science_with_flats': '67', +# 'B600': { +# 'arc': '68', +# 'acquisition': '66', +# 'science_with_flats': '67', +# }, + 'B480': { + 'arc': '104', + 'acquisition': '102', + 'science_with_flats': '103', }, 'R400': { - 'arc': '74', - 'acquisition': '72', - 'science_with_flats': '73', + 'arc': '110', + 'acquisition': '108', + 'science_with_flats': '109', } } elif self.data['slit'] == '1': obsid_map = { - 'B600': { - 'arc': '65', - 'acquisition': '63', - 'science_with_flats': '64', +# 'B600': { +# 'arc': '65', +# 'acquisition': '63', +# 'science_with_flats': '64', +# }, + 'B480': { + 'arc': '101', + 'acquisition': '99', + 'science_with_flats': '100', }, 'R400': { - 'arc': '71', - 'acquisition': '69', - 'science_with_flats': '70', + 'arc': '107', + 'acquisition': '105', + 'science_with_flats': '106', } } elif self.data['n_or_s'] == 'south': if self.data['slit'] == '1.5': obsid_map = { 'B600': { - 'arc': '64', - 'acquisition': '65', - 'science_with_flats': '66', + 'arc': '56', + 'acquisition': '57', + 'science_with_flats': '58', + }, + 'B480': { + 'arc': '93', + 'acquisition': '94', + 'science_with_flats': '95', }, 'R400': { - 'arc': '70', - 'acquisition': '71', - 'science_with_flats': '72', + 'arc': '62', + 'acquisition': '63', + 'science_with_flats': '64', } } elif self.data['slit'] == '1': obsid_map = { 'B600': { - 'arc': '61', - 'acquisition': '62', - 'science_with_flats': '63', + 'arc': '53', + 'acquisition': '54', + 'science_with_flats': '55', + }, + 'B480': { + 'arc': '90', + 'acquisition': '91', + 'science_with_flats': '92', }, 'R400': { - 'arc': '67', - 'acquisition': '68', - 'science_with_flats': '69', + 'arc': '59', + 'acquisition': '60', + 'science_with_flats': '61', } } @@ -395,7 +465,7 @@ def observation_payload(self): payloads[f'{color_grating}_{key}']['target'] = 'Acquisition' elif key == 'science_with_flats': payloads[f'{color_grating}_{key}']['target'] = target.name - if color_grating == 'B600': + if color_grating in ['B600', 'B480']: payloads[f'{color_grating}_{key}']['exptime'] = int(float(self.data['b_exptime'])/2) elif color_grating == 'R400': payloads[f'{color_grating}_{key}']['exptime'] = int(float(self.data['r_exptime'])/2) diff --git a/docker-compose.yml b/docker-compose.yml index c2d51029..ffc7f94e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,8 +11,13 @@ services: volumes: - ${SNEX2_DB_DATA_PATH}:/var/lib/postgresql/data ports: +<<<<<<< HEAD - 5435:5432 #platform: linux/amd64 +======= + - 5435:5432/tcp + platform: linux/amd64 +>>>>>>> 4040213 (something) snex2: image: lcogt/snex2:latest network_mode: bridge @@ -20,6 +25,7 @@ services: - "snex2-db:snex2-db" ports: - 8889:8080 + platform: linux/amd64 restart: "always" mem_limit: "4g" logging: @@ -75,6 +81,7 @@ services: - ${SNEX_MUSCAT_FITS_PATH}:/snex2/data/fits/2m0a/ - ${SNEX_GW_FITS_PATH}:/snex2/data/fits/gw/ - ${SNEX_FLOYDS_PATH}:/snex2/data/floyds/ +<<<<<<< HEAD - ${SNEX_FLOYDS_WEB_PATH}:/snex2/data/WEB/floyds/ snex2-dev: image: snex2-dev:latest @@ -139,3 +146,67 @@ 
services: - ${SNEX_GW_FITS_PATH}:/snex2/data/fits/gw/ - ${SNEX_FLOYDS_PATH}:/snex2/data/floyds/ - ${SNEX_FLOYDS_WEB_PATH}:/snex2/data/WEB/floyds/ +======= +# snex2-dev: +# # image: snex2-dev:latest +# network_mode: bridge +# links: +# - "snex2-db:snex2-db" +# ports: +# - 8890:8080 +# restart: "always" +# mem_limit: "12g" +# logging: +# options: +# max-file: "3" +# max-size: "10m" +# environment: +# - DB_NAME=snex2-dev +# - DB_HOST=snex2-db +# - DB_PORT=5432 +# - SNEX2_USER=${SNEX2_USER} +# - SNEX2_DB_USER=${SNEX2_DB_USER} +# - SNEX2_DB_PASSWORD=${SNEX2_DB_PASSWORD} +# - SNEX1_DB_USER=${SNEX1_DB_USER} +# - SNEX1_DB_PASSWORD=${SNEX1_DB_PASSWORD} +# - SNEX2_DB_BACKEND=postgres +# - LCO_APIKEY=${SNEX2_LCO_APIKEY} +# - SNEXBOT_APIKEY=${SNEX2_SNEXBOT_APIKEY} +# - AWS_ACCESS_KEY_ID=${SNEX2_AWS_ACCESS_KEY_ID} +# - AWS_S3_REGION_NAME=${SNEX2_AWS_REGION_NAME} +# - AWS_SECRET_ACCESS_KEY=${SNEX2_AWS_SECRET_ACCESS_KEY} +# - AWS_STORAGE_BUCKET_NAME=${SNEX2_AWS_STORAGE_BUCKET_NAME} +# - TWITTER_APIKEY=${SNEX2_TWITTER_APIKEY} +# - TWITTER_SECRET=${SNEX2_TWITTER_SECRET} +# - TWITTER_ACCESSTOKEN=${SNEX2_TWITTER_ACCESSTOKEN} +# - TWITTER_ACCESSSECRET=${SNEX2_TWITTER_ACCESSSECRET} +# - GEMINI_EMAIL=${SNEX2_GEMINI_EMAIL} +# - GEMINI_SOUTH_PROGRAMID=${SNEX2_GEMINI_SOUTH_PROGRAMID} +# - GEMINI_SOUTH_PASSWORD=${SNEX2_GEMINI_SOUTH_PASSWORD} +# - GEMINI_SOUTH_SERVER=${SNEX2_GEMINI_SOUTH_SERVER} +# - GEMINI_NORTH_PROGRAMID=${SNEX2_GEMINI_NORTH_PROGRAMID} +# - GEMINI_NORTH_PASSWORD=${SNEX2_GEMINI_NORTH_PASSWORD} +# - GEMINI_NORTH_SERVER=${SNEX2_GEMINI_NORTH_SERVER} +# - SNEX_EMAIL_PASSWORD=${SNEX_EMAIL_PASSWORD} +# - TNS_APIKEY=${TNS_APIKEY} +# - TNS_APIID=${TNS_APIID} +# - SWIFT_USERNAME=${SWIFT_USERNAME} +# - SWIFT_SECRET=${SWIFT_SECRET} +# - LASAIR_IRIS_TOKEN=${LASAIR_IRIS_TOKEN} +# - SCIMMA_AUTH_USERNAME=${SCIMMA_AUTH_USERNAME} +# - SCIMMA_AUTH_PASSWORD=${SCIMMA_AUTH_PASSWORD} +# - GCN_CLASSIC_CLIENT_ID=${GCN_CLASSIC_CLIENT_ID} +# - GCN_CLASSIC_CLIENT_SECRET=${GCN_CLASSIC_CLIENT_SECRET} +# - CREDENTIAL_USERNAME=${CREDENTIAL_USERNAME} +# - CREDENTIAL_PASSWORD=${CREDENTIAL_PASSWORD} +# - TM_TOKEN=${TM_TOKEN} +# - HERMES_BASE_URL=${HERMES_BASE_URL} +# volumes: +# - ${SNEX_THUMBNAIL_PATH}:/snex2/data/thumbs/ +# - ${SNEX_FITS_PATH}:/snex2/data/fits/ +# - ${SNEX_0m4_FITS_PATH}:/snex2/data/fits/0m4/ +# - ${SNEX_2m_FITS_PATH}:/snex2/data/fits/fts/ +# - ${SNEX_MUSCAT_FITS_PATH}:/snex2/data/fits/2m0a/ +# - ${SNEX_GW_FITS_PATH}:/snex2/data/fits/gw/ +# - ${SNEX_FLOYDS_PATH}:/snex2/data/floyds/ +>>>>>>> 4040213 (something) From c50214034f1e9eb1d2a70cc3f7c45d7d89e2bea9 Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Thu, 21 Mar 2024 22:50:07 -0700 Subject: [PATCH 02/21] Added functionlity to automatically fetch templates to be subtracted --- .gitignore | 3 ++ gw/hooks.py | 29 ++++++++--- gw/survey_queries.py | 118 +++++++++++++++++++++++++++++++++++++++++++ gw/views.py | 13 +++-- 4 files changed, 149 insertions(+), 14 deletions(-) create mode 100644 gw/survey_queries.py diff --git a/.gitignore b/.gitignore index 8eb88946..5062af75 100644 --- a/.gitignore +++ b/.gitignore @@ -109,6 +109,9 @@ venv.bak/ src/ custom_code/brokers/queries/queries.json gw/gw_config.ini +<<<<<<< HEAD db_data +======= +>>>>>>> 2cdcce4 (Added functionlity to automatically fetch templates to be subtracted) galaxy_database \ No newline at end of file diff --git a/gw/hooks.py b/gw/hooks.py index ee8aae03..f7173f81 100644 --- a/gw/hooks.py +++ b/gw/hooks.py @@ -1,12 +1,21 @@ import os -from gw.models import GWFollowupGalaxies +from models import 
GWFollowupGalaxy from tom_common.hooks import run_hook from tom_targets.models import Target, TargetExtra from tom_nonlocalizedevents.models import EventSequence -from custom_code.views import cancel_observation, Snex1ConnectionError -from custom_code.hooks import _return_session, _load_table +from ..custom_code.views import cancel_observation, Snex1ConnectionError +from ..custom_code.hooks import _return_session, _load_table +from tom_observations.models import ObservationRecord import logging +<<<<<<< HEAD from django.conf import settings +======= +<<<<<<< Updated upstream +======= +from django.conf import settings +import survey_queries +>>>>>>> Stashed changes +>>>>>>> 2cdcce4 (Added functionlity to automatically fetch templates to be subtracted) logger = logging.getLogger(__name__) @@ -23,7 +32,7 @@ def cancel_gw_obs(galaxy_ids=[], sequence_id=None, wrapped_session=None): return if galaxy_ids: - galaxies = GWFollowupGalaxies.objects.filter(id__in=galaxy_ids) + galaxies = GWFollowupGalaxy.objects.filter(id__in=galaxy_ids) elif sequence_id: sequence = EventSequence.objects.get(id=sequence_id) @@ -76,9 +85,9 @@ def ingest_gw_galaxy_into_snex1(target_id, event_id, wrapped_session=None): db_session = wrapped_session else: - db_session = _return_session(_snex1_address) + db_session = _return_session(settings.SNEX1_DB_URL) - o4_galaxies = _load_table('o4_galaxies', db_address=_snex1_address) + o4_galaxies = _load_table('o4_galaxies', db_address=settings.SNEX1_DB_URL) existing_target = db_session.query(o4_galaxies).filter(o4_galaxies.targetid==target_id) if existing_target.count() > 0: @@ -101,7 +110,13 @@ def ingest_gw_galaxy_into_snex1(target_id, event_id, wrapped_session=None): ra0 = snex2_target.ra dec0 = snex2_target.dec - db_session.add(o4_galaxies(targetid=target_id, event_id=event_id, ra0=ra0, dec0=dec0)) + templ = survey_queries.survey_request(ra0, dec0, 'gri') + templ.search_for_PS1_urls() + templ.search_for_Skymapper_urls() + templ.search_for_DECam_urls() + templ.fetch_urls() + + db_session.add(o4_galaxies(targetid=target_id, event_id=event_id, ra0=ra0, dec0=dec0, **templ.templates_paths)) if not wrapped_session: try: diff --git a/gw/survey_queries.py b/gw/survey_queries.py new file mode 100644 index 00000000..92d20ca7 --- /dev/null +++ b/gw/survey_queries.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +"""survey_queries.py + +""" + +from astropy.table import Table +from astropy.io.votable import parse +from astropy import units as u +import urllib.request, io +from numpy.core.defchararray import startswith +from pyvo.dal import sia + +#Folder definitions +OUT_FOLDER = '/Users/giacomoterreran/GW_templates/O4' #folder where images will be stored +#OUT_FOLDER = '/supernova/data/extdata/GW_templates/O4' + +#PS1 urls +PS1_TAB = 'https://ps1images.stsci.edu/cgi-bin/ps1filenames.py' +PS1_CUT = 'https://ps1images.stsci.edu/cgi-bin/fitscut.cgi' + +#List of available services at: +#https://datalab.noirlab.edu/docs/manual/UsingAstroDataLab/DataAccessInterfaces/SimpleImageAccessSIA/SimpleImageAccessSIA.html +COLLECTION = 'coadd_all' +DECAM_SERVICE = f'https://datalab.noirlab.edu/sia/{COLLECTION}' + +#Skymapper urls +SKY_CUT = 'https://api.skymapper.nci.org.au/public/siap/dr4/query' + +WIDTH_RA = 10/60 #deg +WIDTH_DEC = 10/60 #deg + +class survey_request: + ''' + This class will query the surveys looking for the optical templates. + Each function is associated with a survey. First it will check if + the coordinates are in the footprint of that survey. 
Each function + will return True or False accordingly. In addition, if the coordinates + are indeed in the footprint, then the function will store the hdu of + the template in the 'hdu' attribute. + + ''' + def __init__(self, _ra, _dec, _filters): + self.ra0 = _ra #deg + self.dec0 = _dec #deg + self.filters = _filters + self.urls = {} + self.templates_paths = {} + + def search_for_PS1_urls(self): + ''' + PS1 needs the size in pixels, considering 0.25 arcsec/pixel. + + ''' + + self.urls['PS1']={'g':'--', 'r':'--', 'i':'--'} + + tableurl = f'{PS1_TAB}?ra={self.ra0}&dec={self.dec0}&size={3600*WIDTH_RA/0.25}&format=fits&filters={self.filters}' + table = Table.read(tableurl, format='ascii') + for i in range(len(table)): + flt = table[i]['filter'] + filename = table[i]['filename'] + self.urls['PS1'][flt] = f'{PS1_CUT}?ra={self.ra0}&dec={self.dec0}&size=2500&format=fits&red={filename}' + + + + def search_for_Skymapper_urls(self): + ''' + Skymapper has the ability to search for multiple filters in one go, + but the returned list of file loses the information on the filter, + so I need to send a seperate quary for each filter. + + ''' + + self.urls['Skymapper']={'g':'--', 'r':'--', 'i':'--'} + + for flt in self.filters: + + url = f'{SKY_CUT}?POS={self.ra0},{self.dec0}&SIZE={WIDTH_RA},{WIDTH_DEC}&BAND={flt}&FORMAT=image/fits&INTERSECT=covers&RESPONSEFORMAT=VOTABLE' + + u = urllib.request.urlopen(url) + s = io.BytesIO(u.read()) + votable = parse(s) + + for resource in votable.resources: + for t in resource.tables: + table = t.array['get_fits'][:] + + if len(table) != 0: + self.urls['Skymapper'][flt] = table[0] + + + def search_for_DECam_urls(self): + + self.urls['DECam']={'g':'--', 'r':'--', 'i':'--'} + + connect = sia.SIAService(DECAM_SERVICE) + table = connect.search(pos = (self.ra0,self.dec0), size = (WIDTH_RA, WIDTH_DEC), verbosity=2).to_table() + + for flt in self.filters: + + sel = (table['prodtype'] == 'image') & (startswith(table['obs_bandpass'].astype(str), flt)) + + if len(table[sel]) != 0: + self.urls['DECam'][flt] = table[sel]['access_url'][0] + + def fetch_urls(self): + ''' + It will also store the templates paths in a dictionary + ready to be parsed by function populating the table + ''' + for survey in self.urls: + for flt in self.urls[survey]: + if self.urls[survey][flt] == '--': + self.templates_paths[f'{survey}_{flt}'] = '--' + else: + out_name = f'{OUT_FOLDER}/{survey}/{self.ra0:.4f}_{self.dec0:.4f}_{flt}.fits' + urllib.request.urlretrieve(self.urls[survey][flt], out_name) + self.templates_paths[f'{survey}_{flt}'] = out_name diff --git a/gw/views.py b/gw/views.py index 8f9d9318..8dc3ef4b 100644 --- a/gw/views.py +++ b/gw/views.py @@ -1,4 +1,3 @@ -from django.shortcuts import render from django.conf import settings from django.http import HttpResponse from django.db import transaction @@ -13,16 +12,16 @@ from astropy.io import fits import sep from datetime import datetime, timedelta -from tom_nonlocalizedevents.models import NonLocalizedEvent, EventSequence, EventLocalization -from gw.models import GWFollowupGalaxy -from gw.forms import GWGalaxyObservationForm -from gw.treasure_map_utils import build_tm_pointings, submit_tm_pointings +from tom_nonlocalizedevents.models import EventSequence +from models import GWFollowupGalaxy +from forms import GWGalaxyObservationForm +from treasure_map_utils import build_tm_pointings, submit_tm_pointings from tom_common.hooks import run_hook from tom_targets.models import Target, TargetExtra from tom_observations.facility import get_service_class 
from tom_observations.models import ObservationRecord, ObservationGroup, DynamicCadence -from custom_code.hooks import _return_session -from custom_code.views import Snex1ConnectionError +from ..custom_code.hooks import _return_session +from ..custom_code.views import Snex1ConnectionError import logging logger = logging.getLogger(__name__) From 0d46b94e21e9cd96bd39e3ffd8f3617bdea99822 Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Thu, 21 Mar 2024 23:09:19 -0700 Subject: [PATCH 03/21] fixing hooks --- gw/hooks.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/gw/hooks.py b/gw/hooks.py index f7173f81..31bb76be 100644 --- a/gw/hooks.py +++ b/gw/hooks.py @@ -7,15 +7,8 @@ from ..custom_code.hooks import _return_session, _load_table from tom_observations.models import ObservationRecord import logging -<<<<<<< HEAD -from django.conf import settings -======= -<<<<<<< Updated upstream -======= from django.conf import settings import survey_queries ->>>>>>> Stashed changes ->>>>>>> 2cdcce4 (Added functionlity to automatically fetch templates to be subtracted) logger = logging.getLogger(__name__) From 886d01911539881e79c1f526e4bd17b0b6406c89 Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Tue, 2 Apr 2024 11:35:24 -0700 Subject: [PATCH 04/21] Changed templated folder to be defined by the environmental viariable GWTEMPLATES_FOLDER --- gw/survey_queries.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/gw/survey_queries.py b/gw/survey_queries.py index 92d20ca7..0d68aea6 100644 --- a/gw/survey_queries.py +++ b/gw/survey_queries.py @@ -9,9 +9,10 @@ import urllib.request, io from numpy.core.defchararray import startswith from pyvo.dal import sia +import os #Folder definitions -OUT_FOLDER = '/Users/giacomoterreran/GW_templates/O4' #folder where images will be stored +OUT_FOLDER = os.getenv('GWTEMPLATES_FOLDER') #OUT_FOLDER = '/supernova/data/extdata/GW_templates/O4' #PS1 urls From c0eff4f6675f108e660dbaaa880f602714d3e8fa Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Tue, 2 Apr 2024 11:46:35 -0700 Subject: [PATCH 05/21] Changed DECam COLLECTION to ls_dr9 --- gw/survey_queries.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/gw/survey_queries.py b/gw/survey_queries.py index 0d68aea6..5b9aa1ea 100644 --- a/gw/survey_queries.py +++ b/gw/survey_queries.py @@ -21,7 +21,8 @@ #List of available services at: #https://datalab.noirlab.edu/docs/manual/UsingAstroDataLab/DataAccessInterfaces/SimpleImageAccessSIA/SimpleImageAccessSIA.html -COLLECTION = 'coadd_all' +#COLLECTION = 'coadd_all' +COLLECTION = 'ls_dr9' DECAM_SERVICE = f'https://datalab.noirlab.edu/sia/{COLLECTION}' #Skymapper urls From cfa8627ebc2c9447e4b70180548455b3373fdffa Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Wed, 17 Apr 2024 20:28:57 -0700 Subject: [PATCH 06/21] Added full functionality of SDSS, including variance image, but the rest is probably broken now --- gw/survey_queries.py | 453 +++++++++++++++++++++++++++++++++++++++---- 1 file changed, 416 insertions(+), 37 deletions(-) diff --git a/gw/survey_queries.py b/gw/survey_queries.py index 5b9aa1ea..4eb5b5ac 100644 --- a/gw/survey_queries.py +++ b/gw/survey_queries.py @@ -6,10 +6,17 @@ from astropy.table import Table from astropy.io.votable import parse from astropy import units as u +import astropy.io.fits as pf +from astropy.time import Time +from astroquery.sdss import SDSS +from astropy.coordinates import SkyCoord +import astropy.units as u import urllib.request, io from numpy.core.defchararray import startswith 
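#numpy's element-wise startswith is used below to match the DECam
#'obs_bandpass' column against each requested filter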
from pyvo.dal import sia import os +import numpy as np +from astropy.wcs import WCS #Folder definitions OUT_FOLDER = os.getenv('GWTEMPLATES_FOLDER') @@ -28,8 +35,8 @@ #Skymapper urls SKY_CUT = 'https://api.skymapper.nci.org.au/public/siap/dr4/query' -WIDTH_RA = 10/60 #deg -WIDTH_DEC = 10/60 #deg +FOV = 26.5*u.arcmin #Sinistro FOV is 26.5'x26.5' +RESOLUTION = 0.389*u.arcsec class survey_request: ''' @@ -41,31 +48,149 @@ class survey_request: the template in the 'hdu' attribute. ''' - def __init__(self, _ra, _dec, _filters): - self.ra0 = _ra #deg - self.dec0 = _dec #deg + def __init__(self, _obj, _coord, _filters): + self.obj = _obj + self.coord = _coord self.filters = _filters - self.urls = {} self.templates_paths = {} - def search_for_PS1_urls(self): + + def set_survey(_survey): + def decorate(function): + def initialize_survey(self): + for flt in self.filters: + self.templates_paths[f'{_survey}_{flt}'] = '--' + return function(self, _survey) + return initialize_survey + return decorate + + +####### ---PS1--- ####### + @set_survey('PS1') + def search_for_PS1(self, survey): ''' PS1 needs the size in pixels, considering 0.25 arcsec/pixel. ''' - self.urls['PS1']={'g':'--', 'r':'--', 'i':'--'} - - tableurl = f'{PS1_TAB}?ra={self.ra0}&dec={self.dec0}&size={3600*WIDTH_RA/0.25}&format=fits&filters={self.filters}' + tableurl = f'{PS1_TAB}?ra={self.coord.ra.deg}&dec={self.coord.dec.deg}&size={FOV.to(u.arcsec)/0.25}&format=fits&filters={self.filters}' table = Table.read(tableurl, format='ascii') - for i in range(len(table)): - flt = table[i]['filter'] - filename = table[i]['filename'] - self.urls['PS1'][flt] = f'{PS1_CUT}?ra={self.ra0}&dec={self.dec0}&size=2500&format=fits&red={filename}' + for tab in table: + flt = tab['filter'] + filename = tab['filename'] + url = f'{PS1_CUT}?ra={self.coord.ra.deg}&dec={self.coord.dec.deg}&size=2500&format=fits&red={filename}' + out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits' + urllib.request.urlretrieve(url, out_name) + self.templates_paths[f'{survey}_{flt}'] = out_name + with pf.open(out_name, mode='update') as hdu: + date_obs = Time(hdu[0].header['MJD-OBS'], format='mjd', scale='utc').iso.split() + hdu[0].header['DATE-OBS'] = 'T'.join(date_obs) + hdu[0].header['DAY-OBS'] = date_obs[0] + hdu[0].header['FILTER'] = flt+'p' #note, this works only with griz + hdu[0].header['TELESCOP'] = 'PS1' + hdu[0].header['INSTRUME'] = 'GPC1' + hdu[0].header['OBJECT'] = self.obj + hdu[0].header['WCSERR'] = 0 + hdu[0].header['RA'] = self.coord.ra.deg + hdu[0].header['DEC'] = self.coord.dec.deg + hdu.flush() + + contained_by(self.wcs) + + +####### ---SDSS--- ####### + @set_survey('SDSS') + def search_for_SDSS(self, survey): + ''' + Each tile has a 10 by 13 arcminutes FOV. + ''' - def search_for_Skymapper_urls(self): + grid = generate_FOV_grid(self.coord, FOV) + + all_pointings = [] + + for f,flt in enumerate(self.filters): + + img_data_list = [] + weight_data_list = [] + + # For the first filter, we need to collect the references for all the required SDSS fields + # We don't need to repeat the query for the following filters + # The SDSS fields references are stored in `all_pointings` + if f == 0: + while True: + # SDSS queries radius must be less than 3.0 arcmin. + # initially `grid[0]` will be the center of the field. 
+ # After each iteration, the `grid` array is trimmed down and its first element will be + # the point on the grid most distant from the center + xid = SDSS.query_region(grid[0], radius=3*u.arcmin, spectro=False) + if xid is None: + return + + new_pointings = [list(el) for el in np.unique(xid['run','camcol','field']) if list(el) not in all_pointings] + + all_pointings = all_pointings + new_pointings + + for run, camcol, field in new_pointings: + #fetching the fits as an astropy hdu + im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0] + + sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field) + img_data_list.append(sky_subtracted_original_image_hdu) + weight_data_list.append(weight_image_hdu) + + + for tile in img_data_list: + grid = [p for p in grid if not p.contained_by(WCS(tile.header))] + + if len(grid) == 0: + break + + else: + #Note that we changed the list to all_pointings now that we have them all + for run, camcol, field in all_pointings: + #fetching the fits as an astropy hdu + im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0] + + sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field) + img_data_list.append(sky_subtracted_original_image_hdu) + weight_data_list.append(weight_image_hdu) + + + array, footprint, header = reproject_and_combine(img_data_list, weight_data_list) + + header['NTILES'] = (len(img_data_list), 'Number of SDSS tiles merged') + for i,im in enumerate(img_data_list, start=1): + header[f'RUN{i:>03}'] = (im.header['RUN'], f'Run number of tile {i}') + header[f'CAMCO{i:>03}'] = (im.header['CAMCOL'], f'Column in the imaging camera of tile {i}') + header[f'FIELD{i:>03}'] = (im.header['FIELD'], f'Field number of tile {i}') + header[f'MJD{i:>03}'] = (im.header['MJD-OBS'], f'MJD of tile {i}') + + + header['MJD-OBS'] = (np.mean([im.header['MJD-OBS'] for im in img_data_list]), 'Average MJD') + header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split()) + header['DAY-OBS'] = header['DATE-OBS'].split('T')[0] + header['TELESCOP'] = '2.5m' + header['INSTRUME'] = 'SDSS' + header['OBJECT'] = self.obj + header['FILTER'] = flt+'p' #note, this works only with griz + header['AIRMASS'] = 1 + header['WCSERR'] = 0 + header['BUNIT'] = ('counts', 'ADUs') + pf.writeto(f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits', array, header, overwrite=True) + + #hotpants needs the variance, so we need the writeout 1/footprint + header['BUNIT'] = ('counts2', 'ADUs^2') + pf.writeto(f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.var.fits', 1/footprint, header, overwrite=True) + + + + +####### ---Skymapper--- ####### + @set_survey('Skymapper') + def search_for_Skymapper(self, survey): ''' Skymapper has the ability to search for multiple filters in one go, but the returned list of file loses the information on the filter, @@ -73,11 +198,9 @@ def search_for_Skymapper_urls(self): ''' - self.urls['Skymapper']={'g':'--', 'r':'--', 'i':'--'} - for flt in self.filters: - url = f'{SKY_CUT}?POS={self.ra0},{self.dec0}&SIZE={WIDTH_RA},{WIDTH_DEC}&BAND={flt}&FORMAT=image/fits&INTERSECT=covers&RESPONSEFORMAT=VOTABLE' + url = f'{SKY_CUT}?POS={self.coord.ra.deg},{self.coord.dec.deg}&SIZE={WIDTH_RA},{WIDTH_DEC}&BAND={flt}&FORMAT=image/fits&INTERSECT=covers&RESPONSEFORMAT=VOTABLE' u = urllib.request.urlopen(url) s = io.BytesIO(u.read()) @@ -88,33 +211,289 @@ def search_for_Skymapper_urls(self): table = t.array['get_fits'][:] if len(table) != 0: - 
self.urls['Skymapper'][flt] = table[0] - + out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits' + urllib.request.urlretrieve(table[0], out_name) + self.templates_paths[f'{survey}_{flt}'] = out_name + with pf.open(out_name, mode='update') as hdu: + hdu[0].header['DAY-OBS'] = hdu[0].header['DATE-OBS'].split('T')[0] + hdu[0].header['FILTER'] = flt+'p' #note, this works only with griz + hdu[0].header['OBJECT'] = self.obj + hdu[0].header['WCSERR'] = 0 + hdu[0].header['RA'] = self.coord.ra.deg + hdu[0].header['DEC'] = self.coord.dec.deg - def search_for_DECam_urls(self): + hdu.flush() + + - self.urls['DECam']={'g':'--', 'r':'--', 'i':'--'} +####### ---DECam--- ####### + @set_survey('DECam') + def search_for_DECam(self, survey): connect = sia.SIAService(DECAM_SERVICE) - table = connect.search(pos = (self.ra0,self.dec0), size = (WIDTH_RA, WIDTH_DEC), verbosity=2).to_table() + table = connect.search(pos = (self.coord.ra.deg,self.coord.dec.deg), size = (WIDTH_RA, WIDTH_DEC), verbosity=2).to_table() for flt in self.filters: sel = (table['prodtype'] == 'image') & (startswith(table['obs_bandpass'].astype(str), flt)) if len(table[sel]) != 0: - self.urls['DECam'][flt] = table[sel]['access_url'][0] + out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits' + urllib.request.urlretrieve(table[sel]['access_url'][0], out_name) + self.templates_paths[f'{survey}_{flt}'] = out_name + with pf.open(out_name, mode='update') as hdu: + hdu[0].header['MJD-OBS'] = hdu[0].header['MJD_MEAN'] + hdu[0].header['DATE-OBS'] = hdu[0].header['DATEOBS'] + hdu[0].header['DAY-OBS'] = hdu[0].header['DATE-OBS'].split('T')[0] + hdu[0].header['FILTER'] = flt+'p' #note, this works only with griz + hdu[0].header['OBJECT'] = self.obj + hdu[0].header['WCSERR'] = 0 + hdu[0].header['RA'] = self.coord.ra.deg + hdu[0].header['DEC'] = self.coord.dec.deg + + hdu.flush() - def fetch_urls(self): - ''' - It will also store the templates paths in a dictionary - ready to be parsed by function populating the table - ''' - for survey in self.urls: - for flt in self.urls[survey]: - if self.urls[survey][flt] == '--': - self.templates_paths[f'{survey}_{flt}'] = '--' - else: - out_name = f'{OUT_FOLDER}/{survey}/{self.ra0:.4f}_{self.dec0:.4f}_{flt}.fits' - urllib.request.urlretrieve(self.urls[survey][flt], out_name) - self.templates_paths[f'{survey}_{flt}'] = out_name + +def get_SDSS_gain_and_darkVariance(_flt, _run, _camcol): + #retrieve gain and darkVariance from another online table + q = SDSS.query_sql(f'SELECT DISTINCT gain_{_flt}, darkVariance_{_flt} FROM field WHERE run={_run} AND camcol={_camcol}') + gain = q[f'gain_{_flt}'][0] + darkVariance = q[f'darkVariance_{_flt}'][0] + return gain, darkVariance + +def elaborate_SDSS(_im, _flt, _run, _camcol, _field): + header = _im[0].header + + #Although we do not write out each single tile, this edits in the header will + #make it easier to keep track of each tile in the merged fits file + header['RADESYSa'] = header['RADECSYS'] + del header['RADECSYS'] + header['MJD-OBS'] = Time(f"{header['DATE-OBS']} {header['TAIHMS']}", format='iso', scale='tai').mjd + header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split()) + #For some reason the field info is not stored in the header. 
There is a "FRAME" key, but it's different + header['FIELD'] = _field + + gain, darkVariance = get_SDSS_gain_and_darkVariance(_flt, _run, _camcol) + + #im[2] contains the sky + sky_image = SDSS_sky(_im[2]) #in ADU + + #im[1] contains a row with factors nanomaggies per counts for each column + calib_image = np.asarray([_im[1].data.tolist()]*np.shape(_im[0].data)[0]) + + sky_subtracted_original_image = _im[0].data / calib_image #in ADU + + variance_image = ((_im[0].data / calib_image + sky_image) / gain + darkVariance)# in ADU * calib_image **2 + + sky_subtracted_original_image_hdu = pf.PrimaryHDU(sky_subtracted_original_image, header=header) + #to reproject and combine, we just need the weight, not the variance + weight_image_hdu = pf.PrimaryHDU(1/variance_image, header=header) + + return sky_subtracted_original_image_hdu, weight_image_hdu + + + + +def SDSS_sky(_hdu, method="linear"): + ''' + The function converts the second entry of a hdu SDSS frame (sky) to a good + sky array. + + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :param _hdu: HDUList from SDSS database + :type _hdu: astropy.io.fits.hdu.hdulist.HDUList + + :param method: Method of interpolation. In can be "linear" (default) + :type method: str + :default: "linear" + + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :return: Sky array, in ADUs. + :rtype: numpy.ndarray + + ''' + + from scipy import interpolate + + sky, x, y = _hdu.data[0] + xold = np.arange(np.shape(sky)[0]) + yold = np.arange(np.shape(sky)[1]) + + # `interp2d` is deprecated in SciPy 1.10 and will be removed in SciPy 1.14.0 + # NOTE: `interp2d` has x and y inverted but doesn't need a `meshgrid` to produce the output + # This is the code if `interp2d` is still preferred despite of its deprecation + # tck = interpolate.interp2d(yold, xold, sky, kind=method) + # return tck(x, y) + + tck = interpolate.RegularGridInterpolator((xold,yold), sky, method=method, bounds_error=False, fill_value = None) + + # need to invert x and in y to produce the meshgrid with the right shape + x_grid, y_grid = np.meshgrid(y, x, indexing='ij') + + return tck((x_grid, y_grid)) + + +def generate_FOV_grid(_center, _fov, _step = None, circle=True, wcs = None): + ''' + The function generates a grid of points, each distant :_step: to each other. + The construction follows a recursive hexagon ring, that becomes bigger and + bigger until it encompass the whole FOV. It takes advantage of equilateral + triangles properties, where the height to the top vertex is sqrt(3)/2 times + the side of the triangle. + + If :circle: is True (default), the grid will trimmed to a circle with radius + equal to :_fov: + + If :wcs: is provided, the points outside of the provided WCS are removed. + + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :param _center: Coordinates of the center point of the grid + :type _center: astropy.coordinates.sky_coordinate.SkyCoord + + :param _fov: size of the FOV to tile with the grid. This assumes a square + FOV. This quantity effectively translates to the length of the + apothem of outermost hexagon, which will be equal to half of + the diagonal of the square FOV. + :type _fov: astropy.units.quantity.Quantity + + :param _step: distance of each point in the grid. If a value is not + provided, it will default to half :_fov: + :type _fov: astropy.units.quantity.Quantity + + :param circle: If True, trims the grid to a circle with radius :_fov: + :type: bool + :default: True + + :param wcs: WCS information. 
If provided the points outside of it will + be removed + :type wcs: astropy.wcs.wcs.WCS or None + :default: None + + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :return: List of points of the grid. Each element is a astropy SkyCoord + :rtype: list + + ''' + + import math, itertools + + + if _step == None: + _step = _fov/3 + + points = [[_center, 0*u.deg]] + + # Assuming a square FOV of side A, its diagonal is A*sqrt(2). Therefore an + # hexagon will have to have a side of A*sqrt(2)/sqrt(3) in order to contain + # it all, given any rotation. + + for ring in range(1, math.ceil(_fov/_step*np.sqrt(2)/np.sqrt(3))): + # Moving `ring*_step` E from center to start the new hexagon + p = SkyCoord(_center.ra+_step*ring/np.cos(_center.dec.rad), _center.dec) + points.append([p, p.separation(_center)]) + + # The following `for` loops are written in a fancy way. Since we don't need to store + # the iterator variable, and we just need to repeat the same operation N times, we + # can use `itertools.repeat`, which doesn't need to build and return a list, storing + # the index as well, like range, or xrange would do. This is effectively the fastest + # way to iterate N times in Python. + # A more readable and common way of writing this would be + # `for _ in range(N):` + # And can be written like this if preferred, with negligible loss in performance. + # The number N of points along each hexagon side is exactly the Nth ring, so in our + # case N=`ring` + + # The most distant points from the center are the most important, so I store the + # distance to the center as well, to be able to sorted them later. + + #`p` it's always the previous point to move from + + # Turning 60 degrees and starting moving NW + for _ in itertools.repeat(None, ring): + p = SkyCoord(p.ra-_step/2/np.cos(p.dec.rad), p.dec+_step*np.sqrt(3)/2) + points.append([p, p.separation(_center)]) + # Turning 120 degrees and starting moving W + for _ in itertools.repeat(None, ring): + p = SkyCoord(p.ra-_step/np.cos(p.dec.rad), p.dec) + points.append([p, p.separation(_center)]) + # Turning 120 degrees and starting moving SW + for _ in itertools.repeat(None, ring): + p = SkyCoord(p.ra-_step/2/np.cos(p.dec.rad), p.dec-_step*np.sqrt(3)/2) + points.append([p, p.separation(_center)]) + # Turning 120 degrees and starting moving SE + for _ in itertools.repeat(None, ring): + p = SkyCoord(p.ra+_step/2/np.cos(p.dec.rad), p.dec-_step*np.sqrt(3)/2) + points.append([p, p.separation(_center)]) + # Turning 120 degrees and starting moving E + for _ in itertools.repeat(None, ring): + p = SkyCoord(p.ra+_step/np.cos(p.dec.rad), p.dec) + points.append([p, p.separation(_center)]) + # Turning 120 degrees and starting moving NE + for _ in itertools.repeat(None, ring): + p = SkyCoord(p.ra+_step/2/np.cos(p.dec.rad), p.dec+_step*np.sqrt(3)/2) + points.append([p, p.separation(_center)]) + + if circle: + points = [p for p in points if p[1] <= _fov/2*np.sqrt(2)] + + if wcs: + points = [p for p in points if p[0].contained_by(wcs)] + + # Sorting the list according to the distance, with the most distant points first + points.sort(key = lambda x: x[1], reverse=True) + + #Putting the center back at the beginning + points = [points.pop()] + points + + #Returning just the list of coordinates, without the distance + return [p[0] for p in points] + + +def reproject_and_combine(_imglist, _weightlist):#, _coord): + from reproject.mosaicking import find_optimal_celestial_wcs + from reproject import reproject_interp + from reproject.mosaicking import reproject_and_coadd + + wcs_out, shape_out = 
find_optimal_celestial_wcs(_imglist, resolution=RESOLUTION) + array, footprint = reproject_and_coadd(_imglist, wcs_out, shape_out=shape_out, reproject_function=reproject_interp, input_weights = _weightlist, combine_function='mean') + return array, footprint, wcs_out.to_header() + + + # import numpy as np + # import matplotlib.pyplot as plt + + # plt.figure(figsize=(10, 8)) + # ax1 = plt.subplot(1, 2, 1, projection=wcs_out) + # im1 = ax1.imshow(array, origin='lower', vmin=600, vmax=800) + # ax1.set_title('Mosaic') + # ax2 = plt.subplot(1, 2, 2, projection=wcs_out) + # im2 = ax2.imshow(footprint, origin='lower') + # ax2.set_title('Footprint') + # ax1.plot([49.13674189,49.13062658,48.53523662,48.54543162]*u.deg, [ 41.39245501, 41.83609281,41.82997431, 41.38637844]*u.deg, color = 'r', lw=3, transform=ax1.get_transform("world")) + # ax2.plot([49.13674189,49.13062658,48.53523662,48.54543162]*u.deg, [ 41.39245501, 41.83609281,41.82997431, 41.38637844]*u.deg, color = 'r', lw=3, transform=ax1.get_transform("world")) + + + #grid = generate_FOV_grid(_coord, FOV) + + # for p in grid: + # ax1.plot(p.ra, p.dec, marker='o', color='r', ms=10, transform=ax1.get_transform("world")) + # ax2.plot(p.ra, p.dec, marker='o', color='r', ms=10, transform=ax2.get_transform("world")) + + # plt.show() + + # pf.writeto('test_combine.fits', array, wcs_out.to_header(), overwrite=True) + # pf.writeto('test_footprint.fits', footprint, wcs_out.to_header(), overwrite=True) + + +lco_fits = pf.open('/Users/giacomoterreran/lco/data/snexdata_target7856/tfn1m014-fa20-20231005-0153-e91.fits') +wcs = WCS(lco_fits[0].header) +footprint = wcs.calc_footprint() +target_coord = SkyCoord(lco_fits[0].header['RA'], lco_fits[0].header['DEC'], unit=(u.hourangle, u.deg)) + + +target_coord = SkyCoord(13.576921, -40.392550, unit=(u.deg, u.deg)) +s = survey_request('at2023pcw',target_coord , 'gri') +#s = survey_request(lco_fits[0].header['OBJECT'], target_coord, 'gri') +s.search_for_SDSS() +#s.search_for_DECam() + \ No newline at end of file From 6f2ae56b80bdf964f20b85560987a1c085bfa6ff Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Wed, 1 May 2024 16:20:39 -0700 Subject: [PATCH 07/21] broken --- gw/survey_queries.py | 116 +++++++++++++++++++++++++++++++++++-------- 1 file changed, 95 insertions(+), 21 deletions(-) diff --git a/gw/survey_queries.py b/gw/survey_queries.py index 4eb5b5ac..cad7009c 100644 --- a/gw/survey_queries.py +++ b/gw/survey_queries.py @@ -30,6 +30,7 @@ #https://datalab.noirlab.edu/docs/manual/UsingAstroDataLab/DataAccessInterfaces/SimpleImageAccessSIA/SimpleImageAccessSIA.html #COLLECTION = 'coadd_all' COLLECTION = 'ls_dr9' +COLLECTION = 'coadd/decaps_dr2' DECAM_SERVICE = f'https://datalab.noirlab.edu/sia/{COLLECTION}' #Skymapper urls @@ -73,13 +74,14 @@ def search_for_PS1(self, survey): ''' - tableurl = f'{PS1_TAB}?ra={self.coord.ra.deg}&dec={self.coord.dec.deg}&size={FOV.to(u.arcsec)/0.25}&format=fits&filters={self.filters}' + tableurl = f'{PS1_TAB}?ra={self.coord.ra.deg}&dec={self.coord.dec.deg}&size={FOV.to(u.arcsec).value/0.25}&format=fits&filters={self.filters}&type=stack,stack.wt,stack.mask' table = Table.read(tableurl, format='ascii') for tab in table: flt = tab['filter'] filename = tab['filename'] url = f'{PS1_CUT}?ra={self.coord.ra.deg}&dec={self.coord.dec.deg}&size=2500&format=fits&red={filename}' - out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits' + #out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits' + out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.{".".join(filename.split(".")[10:])}' 
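        #keeping the tail of the PS1 filename in out_name gives the stack,
        #stack.wt and stack.mask products distinct local names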
urllib.request.urlretrieve(url, out_name) self.templates_paths[f'{survey}_{flt}'] = out_name with pf.open(out_name, mode='update') as hdu: @@ -95,8 +97,7 @@ def search_for_PS1(self, survey): hdu[0].header['DEC'] = self.coord.dec.deg hdu.flush() - - contained_by(self.wcs) + ####### ---SDSS--- ####### @@ -179,11 +180,14 @@ def search_for_SDSS(self, survey): header['AIRMASS'] = 1 header['WCSERR'] = 0 header['BUNIT'] = ('counts', 'ADUs') - pf.writeto(f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits', array, header, overwrite=True) + + out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}' + self.templates_paths[f'{survey}_{flt}'] = out_name+'.fits' + pf.writeto(out_name+'.fits', array, header, overwrite=True) #hotpants needs the variance, so we need the writeout 1/footprint header['BUNIT'] = ('counts2', 'ADUs^2') - pf.writeto(f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.var.fits', 1/footprint, header, overwrite=True) + pf.writeto(out_name+'.var.fits', 1/footprint, header, overwrite=True) @@ -194,13 +198,15 @@ def search_for_Skymapper(self, survey): ''' Skymapper has the ability to search for multiple filters in one go, but the returned list of file loses the information on the filter, - so I need to send a seperate quary for each filter. + so I need to send a seperate query for each filter. + Biggest size for Skymapper is 0.17 deg ''' - + from astropy import units as u for flt in self.filters: - url = f'{SKY_CUT}?POS={self.coord.ra.deg},{self.coord.dec.deg}&SIZE={WIDTH_RA},{WIDTH_DEC}&BAND={flt}&FORMAT=image/fits&INTERSECT=covers&RESPONSEFORMAT=VOTABLE' + url = f'{SKY_CUT}?POS={self.coord.ra.deg},{self.coord.dec.deg}&SIZE=0.17,0.17&BAND={flt}&FORMAT=image/fits&INTERSECT=covers&RESPONSEFORMAT=VOTABLE' + u = urllib.request.urlopen(url) s = io.BytesIO(u.read()) @@ -223,6 +229,9 @@ def search_for_Skymapper(self, survey): hdu[0].header['DEC'] = self.coord.dec.deg hdu.flush() + + else: + print(f'Coordinates {self.coord.ra.deg} {self.coord.dec.deg} not in Skymapper with filter {flt}.') @@ -231,12 +240,12 @@ def search_for_Skymapper(self, survey): def search_for_DECam(self, survey): connect = sia.SIAService(DECAM_SERVICE) - table = connect.search(pos = (self.coord.ra.deg,self.coord.dec.deg), size = (WIDTH_RA, WIDTH_DEC), verbosity=2).to_table() + table = connect.search(pos = (self.coord.ra.deg, self.coord.dec.deg), size = (FOV.to(u.deg).value, FOV.to(u.deg).value), verbosity=2).to_table() for flt in self.filters: - - sel = (table['prodtype'] == 'image') & (startswith(table['obs_bandpass'].astype(str), flt)) + sel = (table['prodtype'] == 'image') & (startswith(table['obs_bandpass'].astype(str), flt)) + if len(table[sel]) != 0: out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits' urllib.request.urlretrieve(table[sel]['access_url'][0], out_name) @@ -252,6 +261,8 @@ def search_for_DECam(self, survey): hdu[0].header['DEC'] = self.coord.dec.deg hdu.flush() + else: + print(f'Coordinates {self.coord.ra.deg} {self.coord.dec.deg} not in DECam with filter {flt}.') def get_SDSS_gain_and_darkVariance(_flt, _run, _camcol): @@ -483,17 +494,80 @@ def reproject_and_combine(_imglist, _weightlist):#, _coord): # pf.writeto('test_combine.fits', array, wcs_out.to_header(), overwrite=True) # pf.writeto('test_footprint.fits', footprint, wcs_out.to_header(), overwrite=True) - -lco_fits = pf.open('/Users/giacomoterreran/lco/data/snexdata_target7856/tfn1m014-fa20-20231005-0153-e91.fits') -wcs = WCS(lco_fits[0].header) -footprint = wcs.calc_footprint() -target_coord = SkyCoord(lco_fits[0].header['RA'], lco_fits[0].header['DEC'], 
unit=(u.hourangle, u.deg)) +# lco_fits = pf.open('/Users/giacomoterreran/lco/data/snexdata_target7856/tfn1m014-fa20-20231005-0153-e91.fits') +# wcs = WCS(lco_fits[0].header) +# footprint = wcs.calc_footprint() +# target_coord = SkyCoord(lco_fits[0].header['RA'], lco_fits[0].header['DEC'], unit=(u.hourangle, u.deg)) + +# import numpy as np +# import matplotlib.pyplot as plt + +# im = pf.open('/Users/giacomoterreran/lco/data/data/extdata/O4/PS1/SN2023rbk_g.fits') + +# plt.figure(figsize=(10, 8)) +# ax1 = plt.subplot(1, 1, 1, projection=WCS(im[0])) +# im1 = ax1.imshow(im[0].data, origin='lower', vmin=600, vmax=800) +# ax1.plot([49.13674189,49.13062658,48.53523662,48.54543162]*u.deg, [ 41.39245501, 41.83609281,41.82997431, 41.38637844]*u.deg, color = 'r', lw=3, transform=ax1.get_transform("world")) + + +# grid = generate_FOV_grid(target_coord, FOV) -target_coord = SkyCoord(13.576921, -40.392550, unit=(u.deg, u.deg)) -s = survey_request('at2023pcw',target_coord , 'gri') +# for p in grid: +# ax1.plot(p.ra, p.dec, marker='o', color='r', ms=10, transform=ax1.get_transform("world")) + +# plt.show() +# exit() + +#target_coord = SkyCoord(13.576921, -40.392550, unit=(u.deg, u.deg)) +#s = survey_request('at2023pcw',target_coord , 'gri') #s = survey_request(lco_fits[0].header['OBJECT'], target_coord, 'gri') -s.search_for_SDSS() +#s.search_for_SDSS() +#s.search_for_PS1() #s.search_for_DECam() - \ No newline at end of file + +galaxies = [[125.5433,-25.8880], + [125.4069,-21.5691], + [123.8377,-33.1189], + [122.7733,-30.8752], + [124.7261,-32.7621], + [124.3686,-31.8556], + [124.2324,-31.4309], + [125.9628,-28.2488], + [122.3471,-31.1373], + [125.8190,-22.0851], + [123.9586,-31.1844], + [121.8064,-31.9581], + [125.4043,-33.0136], + [123.1610,-31.8733], + [122.7131,-33.0211], + [120.6195,-29.2095], + [122.8184,-30.9066], + [125.0188,-37.1348], + [124.6821,-32.4162], + [126.6102,-27.3605], + [124.7649,-37.0665], + [123.7104,-34.3874], + [121.4886,-26.8309], + [123.3382,-28.0374], + [123.8712,-28.3101], + [124.8092,-28.7278], + [125.3822,-30.4291], + [124.1169,-27.8063], + [122.1070,-31.0502], + [125.2081,-37.1875]] + +url = 'https://alasky.cds.unistra.fr/hips-image-services/hips2fits#ra=119.51849999999999&dec=-27.298400000000004&fov=0.4&projection=AIT' + +urllib.request.urlretrieve(url, 'test.html') + +# for g in galaxies: +# name = str(g[0])+'_'+str(g[1]) +# target_coord = SkyCoord(g[0], g[1], unit=(u.deg, u.deg)) +# s = survey_request(name, target_coord, 'gri') + +# #s.search_for_Skymapper() +# s.search_for_DECam() + + From 88b02ec78346f8ffcb96321ebd643220abd26aff Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Thu, 2 May 2024 11:15:58 -0700 Subject: [PATCH 08/21] working on it --- gw/survey_queries.py | 111 +++++++++++++++++++++++-------------------- 1 file changed, 59 insertions(+), 52 deletions(-) diff --git a/gw/survey_queries.py b/gw/survey_queries.py index cad7009c..f4e21021 100644 --- a/gw/survey_queries.py +++ b/gw/survey_queries.py @@ -36,8 +36,12 @@ #Skymapper urls SKY_CUT = 'https://api.skymapper.nci.org.au/public/siap/dr4/query' -FOV = 26.5*u.arcmin #Sinistro FOV is 26.5'x26.5' -RESOLUTION = 0.389*u.arcsec +#Specifications for different instruments +#The FOV here is thought as the diameter of a circle that would circumscribe the LCO image. 
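#e.g. a square sinistro frame of side 26.5' fits inside a circle of diameter
#26.5'*sqrt(2) ~= 37.5', hence the sqrt(2) factors in the dictionary below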
+FOV = {'sinistro': 26.5*np.sqrt(2)*u.arcmin, 'muscat': 9.1*np.sqrt(2)*u.arcmin, 'qhy': np.sqrt(1.9**2 + 1.2**2)*u.deg} +RESOLUTION = {'sinistro':0.389*u.arcsec, 'muscat': 0.27*u.arcsec, 'qhy':0.74*u.arcsec} + + class survey_request: ''' @@ -49,18 +53,56 @@ class survey_request: the template in the 'hdu' attribute. ''' + def __init__(self, _obj, _coord, _filters): + ''' + Initializing the class with the name of the object :_obj:, + the coordinates :_coord: and the filters :_filters: to search. + The empty dictionary :templates_paths: is also created. + + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :_obj: Name of the object, mainly for file naming purposes + :type _obj: string + + :_coord: coordinates of the object, corresponding to the center + of the search + :type _coord: astropy.coordinates.sky_coordinate.SkyCoord + + :_filters: filters to search. If searching for multiple filters + use a single long string, e.g. 'gri' + :type _filters: string + + ''' self.obj = _obj self.coord = _coord self.filters = _filters self.templates_paths = {} + #Defining a decorator. Because decorators are cool def set_survey(_survey): + ''' + This decorator set the name of survey for the following function, through the + :_survey: variable, and initialize the :self.templates_paths: dictionary with + a '--' string for that specific survey. The idea is that by initializing the + dictionary, there will be a record that the search has been performed. + Otherwise the dictionary will not have any entry for that survey. + + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :_survey: Name of the survey + :type _center: string + + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :return: Decorated function + :rtype: function + + ''' def decorate(function): def initialize_survey(self): + #initializing all the filters for flt in self.filters: self.templates_paths[f'{_survey}_{flt}'] = '--' + print(f'Searching for {_survey} templates.') return function(self, _survey) return initialize_survey return decorate @@ -70,12 +112,22 @@ def initialize_survey(self): @set_survey('PS1') def search_for_PS1(self, survey): ''' - PS1 needs the size in pixels, considering 0.25 arcsec/pixel. + This function searches and downloads PS1 templates. + Together with the data, it will fetch also the weight images (i.e. 1/variance) + and the mask images. ''' - tableurl = f'{PS1_TAB}?ra={self.coord.ra.deg}&dec={self.coord.dec.deg}&size={FOV.to(u.arcsec).value/0.25}&format=fits&filters={self.filters}&type=stack,stack.wt,stack.mask' + #Creating the url to query the PS1 database. + #NOTE: PS1 needs the size in pixels, considering 0.25 arcsec/pixel. 
+ tableurl = f'{PS1_TAB}?ra={self.coord.ra.deg}&dec={self.coord.dec.deg}&size={FOV["sinistro"].to(u.arcsec).value/0.25}&format=fits&filters={self.filters}&type=stack,stack.wt,stack.mask' table = Table.read(tableurl, format='ascii') + + #If the table is empty, the images are not in PS1 + if len(table) == 0: + print(f'Provided coordinates {self.coord.ra.deg},{self.coord.dec.deg} do not appear to be in the PS1 footprint.') + return + for tab in table: flt = tab['filter'] filename = tab['filename'] @@ -520,54 +572,9 @@ def reproject_and_combine(_imglist, _weightlist):#, _coord): # plt.show() # exit() -#target_coord = SkyCoord(13.576921, -40.392550, unit=(u.deg, u.deg)) -#s = survey_request('at2023pcw',target_coord , 'gri') +target_coord = SkyCoord(13.576921, -40.392550, unit=(u.deg, u.deg)) +s = survey_request('at2023pcw',target_coord , 'gri') #s = survey_request(lco_fits[0].header['OBJECT'], target_coord, 'gri') #s.search_for_SDSS() -#s.search_for_PS1() +s.search_for_PS1() #s.search_for_DECam() - -galaxies = [[125.5433,-25.8880], - [125.4069,-21.5691], - [123.8377,-33.1189], - [122.7733,-30.8752], - [124.7261,-32.7621], - [124.3686,-31.8556], - [124.2324,-31.4309], - [125.9628,-28.2488], - [122.3471,-31.1373], - [125.8190,-22.0851], - [123.9586,-31.1844], - [121.8064,-31.9581], - [125.4043,-33.0136], - [123.1610,-31.8733], - [122.7131,-33.0211], - [120.6195,-29.2095], - [122.8184,-30.9066], - [125.0188,-37.1348], - [124.6821,-32.4162], - [126.6102,-27.3605], - [124.7649,-37.0665], - [123.7104,-34.3874], - [121.4886,-26.8309], - [123.3382,-28.0374], - [123.8712,-28.3101], - [124.8092,-28.7278], - [125.3822,-30.4291], - [124.1169,-27.8063], - [122.1070,-31.0502], - [125.2081,-37.1875]] - -url = 'https://alasky.cds.unistra.fr/hips-image-services/hips2fits#ra=119.51849999999999&dec=-27.298400000000004&fov=0.4&projection=AIT' - -urllib.request.urlretrieve(url, 'test.html') - -# for g in galaxies: -# name = str(g[0])+'_'+str(g[1]) -# target_coord = SkyCoord(g[0], g[1], unit=(u.deg, u.deg)) -# s = survey_request(name, target_coord, 'gri') - -# #s.search_for_Skymapper() -# s.search_for_DECam() - - From b3f944374befe1722537a6682b10271a2b72cc89 Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Tue, 14 May 2024 09:01:32 -0700 Subject: [PATCH 09/21] Full PanSTARRS and SDSS template retrieval complete. 
--- gw/survey_queries.py | 800 +++++++++++++++++++++++++------------------ 1 file changed, 473 insertions(+), 327 deletions(-) diff --git a/gw/survey_queries.py b/gw/survey_queries.py index f4e21021..0f701c0f 100644 --- a/gw/survey_queries.py +++ b/gw/survey_queries.py @@ -4,43 +4,43 @@ """ from astropy.table import Table -from astropy.io.votable import parse from astropy import units as u import astropy.io.fits as pf from astropy.time import Time from astroquery.sdss import SDSS from astropy.coordinates import SkyCoord import astropy.units as u -import urllib.request, io -from numpy.core.defchararray import startswith -from pyvo.dal import sia -import os +import os, math import numpy as np from astropy.wcs import WCS -#Folder definitions +# Folder definitions OUT_FOLDER = os.getenv('GWTEMPLATES_FOLDER') #OUT_FOLDER = '/supernova/data/extdata/GW_templates/O4' -#PS1 urls +# PS1 urls PS1_TAB = 'https://ps1images.stsci.edu/cgi-bin/ps1filenames.py' -PS1_CUT = 'https://ps1images.stsci.edu/cgi-bin/fitscut.cgi' -#List of available services at: -#https://datalab.noirlab.edu/docs/manual/UsingAstroDataLab/DataAccessInterfaces/SimpleImageAccessSIA/SimpleImageAccessSIA.html -#COLLECTION = 'coadd_all' -COLLECTION = 'ls_dr9' -COLLECTION = 'coadd/decaps_dr2' -DECAM_SERVICE = f'https://datalab.noirlab.edu/sia/{COLLECTION}' - -#Skymapper urls -SKY_CUT = 'https://api.skymapper.nci.org.au/public/siap/dr4/query' - -#Specifications for different instruments -#The FOV here is thought as the diameter of a circle that would circumscribe the LCO image. -FOV = {'sinistro': 26.5*np.sqrt(2)*u.arcmin, 'muscat': 9.1*np.sqrt(2)*u.arcmin, 'qhy': np.sqrt(1.9**2 + 1.2**2)*u.deg} +# PS1 provides a cutout API. We probably prefer to use the whole +# skycell, but we can easily change that with this bool variable +SKYCELL = True +if SKYCELL: + PS1_CUT = 'https://ps1images.stsci.edu/' +else: + PS1_CUT = 'https://ps1images.stsci.edu/cgi-bin/fitscut.cgi' + +# Specifications for different instruments +# The FOV here is thought as the diameter of a circle that would circumscribe the LCO image. +# This is effectively bigger than the actual FOV of each instrument, but it will guarantee +# that the full FOV is covered given any rotation +INSTRUMENT_LIST = ['sinistro', 'muscat', 'qhy'] +#FOV = {'sinistro': 26.5*np.sqrt(2)*u.arcmin, 'muscat': 9.1*np.sqrt(2)*u.arcmin, 'qhy': np.sqrt(1.9**2 + 1.2**2)*u.deg} +# The FOV of 'qhy' is rectangular, se we calculate the side of square having the same diagonal. +FOV = {'sinistro': 26.5*u.arcmin, 'muscat': 9.1*u.arcmin, 'qhy': np.sqrt((1.9**2 + 1.2**2)/2)*u.deg} RESOLUTION = {'sinistro':0.389*u.arcsec, 'muscat': 0.27*u.arcsec, 'qhy':0.74*u.arcsec} +SURVEYS_SKYCELL_SIZES = {'PS1': 0.4*u.deg, 'SDSS': 10*u.arcmin} + class survey_request: @@ -54,10 +54,14 @@ class survey_request: ''' - def __init__(self, _obj, _coord, _filters): + def __init__(self, _obj, _coord, _filters, _inst): ''' Initializing the class with the name of the object :_obj:, the coordinates :_coord: and the filters :_filters: to search. + The template images will be resampled to match the resolution + of the instrument (or list of instruments) provided by :_inst:. + The template FOV will also be as big as to include the whole + FOV of that instrument. The empty dictionary :templates_paths: is also created. *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* @@ -72,14 +76,35 @@ def __init__(self, _obj, _coord, _filters): use a single long string, e.g. 'gri' :type _filters: string + :_inst: instrument name, or list of instrument names. 
+                to be valid, the name will be checked against the global
+                variable :INSTRUMENT_LIST:, defined above.
+        :type _inst: string or list
+
         '''
         self.obj = _obj
         self.coord = _coord
         self.filters = _filters
+
+        # Making the :instruments: list and checking the instruments exist.
+        if isinstance(_inst, str):
+            if _inst == 'all':
+                self.instruments = INSTRUMENT_LIST
+            else:
+                self.instruments = [_inst]
+        else:
+            self.instruments = _inst
+        for i in self.instruments:
+            if i not in INSTRUMENT_LIST:
+                raise ValueError(f'Instrument `{i}` does not appear to be currently supported. If this is a new instrument, you will need to include it in the INSTRUMENT_LIST, as well as update the FOV and RESOLUTION dictionaries.')
+
+        # Sorting the instruments in decreasing FOV size.
+        self.instruments.sort(key = lambda x: FOV[x], reverse=True)
+
         self.templates_paths = {}


-    #Defining a decorator. Because decorators are cool
+    # Defining a decorator. Because decorators are cool
     def set_survey(_survey):
         '''
         This decorator sets the name of the survey for the following function, through the
         :_survey: variable, and initializes the :self.templates_paths: dictionary with
         a '--' string for that specific survey. The idea is that by initializing the
         dictionary, there will be a record that the search has been performed.
         Otherwise the dictionary will not have any entry for that survey.
         '''
         def decorate(function):
             def initialize_survey(self):
-                #initializing all the filters
+                # Initializing all the filters
                 for flt in self.filters:
-                    self.templates_paths[f'{_survey}_{flt}'] = '--'
-                print(f'Searching for {_survey} templates.')
+                    for inst in self.instruments:
+                        self.templates_paths[f'{inst}_{_survey}_{flt}'] = '--'
+                print(f'Searching for {_survey} templates.\n')
                 return function(self, _survey)
             return initialize_survey
         return decorate


 ####### ---PS1--- #######
     @set_survey('PS1')
     def search_for_PS1(self, survey):
         '''
+        This function searches and downloads PS1 templates.
+        Together with the data, it will also fetch the weight images (i.e. 1/variance)
+        and the mask images.

         '''

-        #Creating the url to query the PS1 database.
-        #NOTE: PS1 needs the size in pixels, considering 0.25 arcsec/pixel.
-        tableurl = f'{PS1_TAB}?ra={self.coord.ra.deg}&dec={self.coord.dec.deg}&size={FOV["sinistro"].to(u.arcsec).value/0.25}&format=fits&filters={self.filters}&type=stack,stack.wt,stack.mask'
-        table = Table.read(tableurl, format='ascii')
-
-        #If the table is empty, the images are not in PS1
-        if len(table) == 0:
-            print(f'Provided coordinates {self.coord.ra.deg},{self.coord.dec.deg} do not appear to be in the PS1 footprint.')
-            return
-
+        # Instruments are sorted by FOV size.
+        # :self.instruments[0]: is the instrument with the biggest FOV
+        # and therefore requires the biggest template. We start creating
+        # this, and instead of repeating the procedure, we just scale down
+        # this first template to match the smaller requirements.
+        inst = self.instruments[0]
+
+        # Generate grid of points covering the FOV
+        grid = generate_FOV_grid(_center = self.coord, _fov = FOV[inst], _step=SURVEYS_SKYCELL_SIZES[survey])
+
+        # NOTE: wt are actually variance maps
+        data_lists = {'stack':{}, 'mask':{}, 'wt':{}}
+
+        while True:
+            # Initially :grid[0]: will be the center of the field.
+            # After each iteration, the :grid: array is trimmed down
+            # and its first element will be the point on the grid
+            # most distant from the center
+
+            # Creating the url to query the PS1 database.
+            # NOTE: PS1 needs the size in pixels, considering 0.25 arcsec/pixel.
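+            # (e.g. for sinistro's 26.5 arcmin FOV: 26.5*60/0.25 = 6360 pixels on a side)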
+            # This first query returns a Table with a list of files
+            print(f'Querying the PS1 database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.')
+            tableurl = f'{PS1_TAB}?ra={grid[0].ra.deg}&dec={grid[0].dec.deg}&size={FOV[inst].to(u.arcsec).value/0.25}&format=fits&filters={self.filters}&type=stack,stack.wt,stack.mask'
+            table = Table.read(tableurl, format='ascii')
+
+            # If the table is empty, the images are not in PS1
+            if len(table) == 0:
+                print(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the PS1 footprint.\n')
+                return
+
+            print('Coordinates in PS1 footprint.')
+            for tab in table:
+                flt = tab['filter']
+                filename = tab['filename']
+                filetype = tab['type'].split('.')[-1]
+
+                # Initializing dictionaries
+                if flt not in data_lists[filetype]:
+                    data_lists[filetype][flt] = []
+
+                if SKYCELL:
+                    url = f'{PS1_CUT}{filename}'
+                else:
+                    url = f'{PS1_CUT}?ra={grid[0].ra.deg}&dec={grid[0].dec.deg}&size=6000&format=fits&red={filename}'
+
+                print(f'Retrieving file {filename}')
+                with pf.open(url) as hdulist:
+                    # The skycell headers need some adjustments
+                    if SKYCELL:
+                        #hdulist[1].header['TIMESYS'] = 'TAI'
+                        hdulist[1].header['RADESYSa'] = 'FK5'
+                        hdulist[1].header['DATE-OBS'] = 'T'.join(Time(hdulist[1].header['MJD-OBS'], format='mjd', scale='utc').iso.split())
+                        hdulist[1].header['PC1_1'] = hdulist[1].header['PC001001']
+                        hdulist[1].header['PC1_2'] = hdulist[1].header['PC001002']
+                        hdulist[1].header['PC2_1'] = hdulist[1].header['PC002001']
+                        hdulist[1].header['PC2_2'] = hdulist[1].header['PC002002']
+                        del hdulist[1].header['PC001001']
+                        del hdulist[1].header['PC001002']
+                        del hdulist[1].header['PC002001']
+                        del hdulist[1].header['PC002002']
+
+                        # The algorithm for decompressing fits file data
+                        # doesn't work on PS1 skycells since astropy version 5.3
+                        # The easiest way to fix this is to remove the
+                        # 'ZBLANK' key (assuming it's equal to 'BLANK')
+                        if 'ZBLANK' in hdulist[1].header:
+                            del hdulist[1].header['ZBLANK']
+
+                        # The image flux scaling is also non-standard for the stack images
+                        if filetype != 'mask':
+                            hdulist[1].data = hdulist[1].header['BOFFSET'] + hdulist[1].header['BSOFTEN'] * (10**(0.4*hdulist[1].data) - 10**(-0.4*hdulist[1].data))
+                            del hdulist[1].header['BLANK']
+
+                        # Getting rid of the edge nans
+                        fixed_hdu = trim_nan(hdulist[1])
+                    else:
+                        fixed_hdu = trim_nan(hdulist[0])
+
+                    del fixed_hdu.header['RADESYS']
+
+                    # Since the weight images are actually variance maps, we need
+                    # to take their reciprocal
+                    if filetype == 'wt':
+                        fixed_hdu.data = 1/fixed_hdu.data
+
+                    data_lists[filetype][flt].append(fixed_hdu)
+
+            for skycell in data_lists['stack'][self.filters[0]]:
+                # Trimming down the grid, leaving the points currently not in any skycell
+                grid = [p for p in grid if not p.contained_by(WCS(skycell.header))]
+
+            if len(grid) == 0:
+                break
+
+        # We now trim the list of skycells according to each instrument FOV
+        # For the first instrument this step should be redundant since we
+        # selected the skycell in order to tile the FOV already.
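+        # For each instrument we rebuild the FOV grid at that instrument's size
+        # and keep only the skycells that still contain at least one grid point
+        # (the same `SkyCoord.contained_by` test used above).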
+        for inst in self.instruments:
+            new_grid = generate_FOV_grid(_center = self.coord, _fov = FOV[inst], _step=SURVEYS_SKYCELL_SIZES[survey])
+            for flt in data_lists['stack']:
+                # Since enumerate() returns a generator and generators can't be reversed,
+                # we need to convert it to a list first
+                for skycell_idx, skycell in reversed(list(enumerate(data_lists['stack'][flt]))):
+                    contained = False
+                    for p in new_grid:
+                        if p.contained_by(WCS(skycell.header)):
+                            contained = True
+                            break
+                    # If no point of the grid is contained by the skycell,
+                    # drop it from all the data lists
+                    if not contained:
+                        for filetype in data_lists:
+                            data_lists[filetype][flt].pop(skycell_idx)
+
+                print(f'Reprojecting and combining skycells of filter {flt} for {inst}.')
+                array, footprint, header = reproject_and_combine(data_lists['stack'][flt], data_lists['wt'][flt], inst)
+
+                # Keeping track of the single skycells that are being combined.
+                header['NSKYCELL'] = (len(data_lists['stack'][flt]), 'Number of PS1 skycells merged')
+                for i,im in enumerate(data_lists['stack'][flt], start=1):
+                    header[f'STK_TY{i:>02}'] = (im.header['STK_TYPE'], f'type of stack {i}')
+                    header[f'STK_ID{i:>02}'] = (im.header['STK_ID'], f'ID of stack {i}')
+                    header[f'SKYCEL{i:>02}'] = (im.header['SKYCELL'], f'skycell of stack {i}')
+                    header[f'TES_ID{i:>02}'] = (im.header['TESS_ID'], f'tessellation ID of stack {i}')
+
+                # Populating the header with the keys needed by the pipeline
+                header['RA'] = self.coord.ra.deg
+                header['DEC'] = self.coord.dec.deg
+                header['CAT-RA'] = self.coord.ra.deg
+                header['CAT-DEC'] = self.coord.dec.deg
+                header['MJD-OBS'] = (np.mean([im.header['MJD-OBS'] for im in data_lists['stack'][flt]]), 'Average MJD')
+                header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split())
+                header['DAY-OBS'] = header['DATE-OBS'].split('T')[0].replace('-', '')
+                header['TELESCOP'] = 'PS1'
+                header['INSTRUME'] = 'GPC1'
+                header['OBJECT'] = self.obj
+                header['FILTER'] = flt+'p' # NOTE: this works only with griz
+                header['AIRMASS'] = 1
+                header['WCSERR'] = 0
+                header['PIXSCALE'] = (RESOLUTION[inst].value, 'arcsec/pixel')
+                header['BUNIT'] = ('counts', 'ADUs')
+
+                # Writing out the final combined template
+                out_name = f'{OUT_FOLDER}/{survey}/{inst}_{self.obj}_{flt}'
+                self.templates_paths[f'{inst}_{survey}_{flt}'] = out_name+'.fits'
+                pf.writeto(out_name+'.fits', array, header, overwrite=True)
+
+                # Hotpants needs the variance, so we need to write out 1/footprint
+                header['BUNIT'] = ('counts2', 'ADUs^2')
+                pf.writeto(out_name+'.var.fits', 1/footprint, header, overwrite=True)

-        for tab in table:
-            flt = tab['filter']
-            filename = tab['filename']
-            url = f'{PS1_CUT}?ra={self.coord.ra.deg}&dec={self.coord.dec.deg}&size=2500&format=fits&red={filename}'
-            #out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits'
-            out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.{".".join(filename.split(".")[10:])}'
-            urllib.request.urlretrieve(url, out_name)
-            self.templates_paths[f'{survey}_{flt}'] = out_name
-            with pf.open(out_name, mode='update') as hdu:
-                date_obs = Time(hdu[0].header['MJD-OBS'], format='mjd', scale='utc').iso.split()
-                hdu[0].header['DATE-OBS'] = 'T'.join(date_obs)
-                hdu[0].header['DAY-OBS'] = date_obs[0]
-                hdu[0].header['FILTER'] = flt+'p' #note, this works only with griz
-                hdu[0].header['TELESCOP'] = 'PS1'
-                hdu[0].header['INSTRUME'] = 'GPC1'
-                hdu[0].header['OBJECT'] = self.obj
-                hdu[0].header['WCSERR'] = 0
-                hdu[0].header['RA'] = self.coord.ra.deg
-                hdu[0].header['DEC'] = self.coord.dec.deg
-
-                hdu.flush()
@@ 
-156,46 +305,62 @@ def search_for_PS1(self, survey):


 ####### ---SDSS--- #######
     @set_survey('SDSS')
     def search_for_SDSS(self, survey):
         '''
-        Each tile has a 10 by 13 arcminutes FOV.
+        This function searches and downloads SDSS templates.
+        Together with the data, it will also fetch the variance images.

         '''

-        grid = generate_FOV_grid(self.coord, FOV)
+        # Instruments are sorted by FOV size.
+        # :self.instruments[0]: is the instrument with the biggest FOV
+        # and therefore requires the biggest template. We start creating
+        # this, and instead of repeating the procedure, we just scale down
+        # this first template to match the smaller requirements.
+        inst = self.instruments[0]
+
+        # Generate grid of points covering the FOV
+        grid = generate_FOV_grid(_center = self.coord, _fov = FOV[inst], _step=SURVEYS_SKYCELL_SIZES[survey])

         all_pointings = []

+        img_data_list = {}
+        weight_data_list = {}
+
         for f,flt in enumerate(self.filters):

-            img_data_list = []
-            weight_data_list = []
+            img_data_list[flt] = []
+            weight_data_list[flt] = []

             # For the first filter, we need to collect the references for all the required SDSS fields
             # We don't need to repeat the query for the following filters
-            # The SDSS fields references are stored in `all_pointings`
+            # The SDSS fields references are stored in :all_pointings:
             if f == 0:
                 while True:
                     # SDSS queries radius must be less than 3.0 arcmin.
-                    # initially `grid[0]` will be the center of the field.
-                    # After each iteration, the `grid` array is trimmed down and its first element will be
+                    # Initially :grid[0]: will be the center of the field.
+                    # After each iteration, the :grid: array is trimmed down and its first element will be
                     # the point on the grid most distant from the center
+                    print(f'Querying the SDSS database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.')
                     xid = SDSS.query_region(grid[0], radius=3*u.arcmin, spectro=False)
                     if xid is None:
+                        print(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the SDSS footprint.\n')
                         return

+                    print('Coordinates in SDSS footprint.')
                     new_pointings = [list(el) for el in np.unique(xid['run','camcol','field']) if list(el) not in all_pointings]

                     all_pointings = all_pointings + new_pointings

                     for run, camcol, field in new_pointings:
                         #fetching the fits as an astropy hdu
+                        print(f'Retrieving file with run={run}, camcol={camcol}, field={field} and filter {flt}.')
                         im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0]

                         sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field)
-                        img_data_list.append(sky_subtracted_original_image_hdu)
-                        weight_data_list.append(weight_image_hdu)
+                        img_data_list[flt].append(sky_subtracted_original_image_hdu)
+                        weight_data_list[flt].append(weight_image_hdu)

-                    for tile in img_data_list:
+                    for tile in img_data_list[flt]:
                         grid = [p for p in grid if not p.contained_by(WCS(tile.header))]

                     if len(grid) == 0:
                         break

@@ -208,123 +373,98 @@ def search_for_SDSS(self, survey):
             else:
                 #Note that we changed the list to all_pointings now that we have them all
                 for run, camcol, field in all_pointings:
                     #fetching the fits as an astropy hdu
                     im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0]

                     sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field)
-                    img_data_list.append(sky_subtracted_original_image_hdu)
-                    weight_data_list.append(weight_image_hdu)
+                    img_data_list[flt].append(sky_subtracted_original_image_hdu)
+                    weight_data_list[flt].append(weight_image_hdu)

-            array, footprint, header = reproject_and_combine(img_data_list, weight_data_list)
-
-            header['NTILES'] = (len(img_data_list), 'Number of SDSS tiles merged')
-            for i,im in 
enumerate(img_data_list, start=1): - header[f'RUN{i:>03}'] = (im.header['RUN'], f'Run number of tile {i}') - header[f'CAMCO{i:>03}'] = (im.header['CAMCOL'], f'Column in the imaging camera of tile {i}') - header[f'FIELD{i:>03}'] = (im.header['FIELD'], f'Field number of tile {i}') - header[f'MJD{i:>03}'] = (im.header['MJD-OBS'], f'MJD of tile {i}') - - - header['MJD-OBS'] = (np.mean([im.header['MJD-OBS'] for im in img_data_list]), 'Average MJD') - header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split()) - header['DAY-OBS'] = header['DATE-OBS'].split('T')[0] - header['TELESCOP'] = '2.5m' - header['INSTRUME'] = 'SDSS' - header['OBJECT'] = self.obj - header['FILTER'] = flt+'p' #note, this works only with griz - header['AIRMASS'] = 1 - header['WCSERR'] = 0 - header['BUNIT'] = ('counts', 'ADUs') - - out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}' - self.templates_paths[f'{survey}_{flt}'] = out_name+'.fits' - pf.writeto(out_name+'.fits', array, header, overwrite=True) - - #hotpants needs the variance, so we need the writeout 1/footprint - header['BUNIT'] = ('counts2', 'ADUs^2') - pf.writeto(out_name+'.var.fits', 1/footprint, header, overwrite=True) - - - - -####### ---Skymapper--- ####### - @set_survey('Skymapper') - def search_for_Skymapper(self, survey): - ''' - Skymapper has the ability to search for multiple filters in one go, - but the returned list of file loses the information on the filter, - so I need to send a seperate query for each filter. - Biggest size for Skymapper is 0.17 deg - - ''' - from astropy import units as u - for flt in self.filters: + # We now trim the list of skycells according to each instrument FOV + # For the first instrument this step should be redundant since we + # selected the skycell in order to tile the FOV already. 
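+        # (As in search_for_PS1, the lists are walked in reverse so that popping
+        # a tile by index does not shift the indices still left to check.)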
+        for inst in self.instruments:
+            new_grid = generate_FOV_grid(_center = self.coord, _fov = FOV[inst], _step=SURVEYS_SKYCELL_SIZES[survey])
+            for flt in img_data_list:
+                # Since enumerate() returns a generator and generators can't be reversed,
+                # we need to convert it to a list first
+                for skycell_idx, skycell in reversed(list(enumerate(img_data_list[flt]))):
+                    contained = False
+                    for p in new_grid:
+                        if p.contained_by(WCS(skycell.header)):
+                            contained = True
+                            break
+                    # If no point of the grid is contained by the skycell,
+                    # drop it from all the data lists
+                    if not contained:
+                        img_data_list[flt].pop(skycell_idx)
+                        weight_data_list[flt].pop(skycell_idx)
+
+                print(f'Reprojecting and combining skycells of filter {flt} for {inst}.')
+                array, footprint, header = reproject_and_combine(img_data_list[flt], weight_data_list[flt], inst)
+
+                header['NTILES'] = (len(img_data_list[flt]), 'Number of SDSS tiles merged')
+                for i,im in enumerate(img_data_list[flt], start=1):
+                    header[f'RUN{i:>03}'] = (im.header['RUN'], f'Run number of tile {i}')
+                    header[f'CAMCO{i:>03}'] = (im.header['CAMCOL'], f'Column in the imaging camera of tile {i}')
+                    header[f'FIELD{i:>03}'] = (im.header['FIELD'], f'Field number of tile {i}')
+                    header[f'MJD{i:>03}'] = (im.header['MJD-OBS'], f'MJD of tile {i}')
+
+                header['RA'] = self.coord.ra.deg
+                header['DEC'] = self.coord.dec.deg
+                header['CAT-RA'] = self.coord.ra.deg
+                header['CAT-DEC'] = self.coord.dec.deg
+                header['MJD-OBS'] = (np.mean([im.header['MJD-OBS'] for im in img_data_list[flt]]), 'Average MJD')
+                header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split())
+                header['DAY-OBS'] = header['DATE-OBS'].split('T')[0].replace('-', '')
+                header['TELESCOP'] = '2.5m'
+                header['INSTRUME'] = 'SDSS'
+                header['OBJECT'] = self.obj
+                header['FILTER'] = flt+'p' #note, this works only with griz
+                header['AIRMASS'] = 1
+                header['WCSERR'] = 0
+                header['PIXSCALE'] = (RESOLUTION[inst].value, 'arcsec/pixel')
+                header['BUNIT'] = ('counts', 'ADUs')
+
+                out_name = f'{OUT_FOLDER}/{survey}/{inst}_{self.obj}_{flt}'
+                self.templates_paths[f'{inst}_{survey}_{flt}'] = out_name+'.fits'
+                pf.writeto(out_name+'.fits', array, header, overwrite=True)
+
+                # Hotpants needs the variance, so we need to write out 1/footprint
+                header['BUNIT'] = ('counts2', 'ADUs^2')
+                pf.writeto(out_name+'.var.fits', 1/footprint, header, overwrite=True)


+def elaborate_SDSS(_im, _flt, _run, _camcol, _field):
+    '''
+    SDSS images require a little bit of manipulation.
+    This function will produce a ready-to-use image
+    as well as its corresponding weight image.
- u = urllib.request.urlopen(url) - s = io.BytesIO(u.read()) - votable = parse(s) + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :param _im: SDSS image HDUList + :type _im: `~astropy.io.fits.HDUList` - for resource in votable.resources: - for t in resource.tables: - table = t.array['get_fits'][:] + :param _flt: SDSS filter + :type _flt: string - if len(table) != 0: - out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits' - urllib.request.urlretrieve(table[0], out_name) - self.templates_paths[f'{survey}_{flt}'] = out_name - with pf.open(out_name, mode='update') as hdu: - hdu[0].header['DAY-OBS'] = hdu[0].header['DATE-OBS'].split('T')[0] - hdu[0].header['FILTER'] = flt+'p' #note, this works only with griz - hdu[0].header['OBJECT'] = self.obj - hdu[0].header['WCSERR'] = 0 - hdu[0].header['RA'] = self.coord.ra.deg - hdu[0].header['DEC'] = self.coord.dec.deg + :param _run: Run number + :type _run: int - hdu.flush() - - else: - print(f'Coordinates {self.coord.ra.deg} {self.coord.dec.deg} not in Skymapper with filter {flt}.') - - + :param _camcol: Column in the imaging camera + :type _camcol: int -####### ---DECam--- ####### - @set_survey('DECam') - def search_for_DECam(self, survey): + :param _field: Field number + :type _field: int - connect = sia.SIAService(DECAM_SERVICE) - table = connect.search(pos = (self.coord.ra.deg, self.coord.dec.deg), size = (FOV.to(u.deg).value, FOV.to(u.deg).value), verbosity=2).to_table() + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :return: HDU of the image, HDU of the weight image + :rtype: list - for flt in self.filters: - - sel = (table['prodtype'] == 'image') & (startswith(table['obs_bandpass'].astype(str), flt)) - - if len(table[sel]) != 0: - out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits' - urllib.request.urlretrieve(table[sel]['access_url'][0], out_name) - self.templates_paths[f'{survey}_{flt}'] = out_name - with pf.open(out_name, mode='update') as hdu: - hdu[0].header['MJD-OBS'] = hdu[0].header['MJD_MEAN'] - hdu[0].header['DATE-OBS'] = hdu[0].header['DATEOBS'] - hdu[0].header['DAY-OBS'] = hdu[0].header['DATE-OBS'].split('T')[0] - hdu[0].header['FILTER'] = flt+'p' #note, this works only with griz - hdu[0].header['OBJECT'] = self.obj - hdu[0].header['WCSERR'] = 0 - hdu[0].header['RA'] = self.coord.ra.deg - hdu[0].header['DEC'] = self.coord.dec.deg - - hdu.flush() - else: - print(f'Coordinates {self.coord.ra.deg} {self.coord.dec.deg} not in DECam with filter {flt}.') - + ''' -def get_SDSS_gain_and_darkVariance(_flt, _run, _camcol): - #retrieve gain and darkVariance from another online table - q = SDSS.query_sql(f'SELECT DISTINCT gain_{_flt}, darkVariance_{_flt} FROM field WHERE run={_run} AND camcol={_camcol}') - gain = q[f'gain_{_flt}'][0] - darkVariance = q[f'darkVariance_{_flt}'][0] - return gain, darkVariance + from scipy import interpolate -def elaborate_SDSS(_im, _flt, _run, _camcol, _field): header = _im[0].header #Although we do not write out each single tile, this edits in the header will @@ -336,10 +476,29 @@ def elaborate_SDSS(_im, _flt, _run, _camcol, _field): #For some reason the field info is not stored in the header. 
There is a "FRAME" key, but it's different
     header['FIELD'] = _field

-    gain, darkVariance = get_SDSS_gain_and_darkVariance(_flt, _run, _camcol)
+    #retrieve gain and darkVariance from another online table
+    q = SDSS.query_sql(f'SELECT DISTINCT gain_{_flt}, darkVariance_{_flt} FROM field WHERE run={_run} AND camcol={_camcol}')
+    gain = q[f'gain_{_flt}'][0]
+    darkVariance = q[f'darkVariance_{_flt}'][0]

     #im[2] contains the sky
-    sky_image = SDSS_sky(_im[2]) #in ADU
+    sky, x, y = _im[2].data[0]
+    x_old = np.arange(np.shape(sky)[0])
+    y_old = np.arange(np.shape(sky)[1])
+
+    # `interp2d` is deprecated in SciPy 1.10 and will be removed in SciPy 1.14.0
+    # so we are using `interpolate.RegularGridInterpolator` instead
+    # NOTE: `interp2d` has x and y inverted but doesn't need a `meshgrid` to produce the output
+    # This is the code if `interp2d` is still preferred despite its deprecation
+    # tck = interpolate.interp2d(y_old, x_old, sky, kind=method)
+    # sky_image = tck(x, y)
+
+    tck = interpolate.RegularGridInterpolator((x_old,y_old), sky, method="linear", bounds_error=False, fill_value = None)
+
+    # need to invert x and y to produce the meshgrid with the right shape
+    x_grid, y_grid = np.meshgrid(y, x, indexing='ij')
+
+    sky_image = tck((x_grid, y_grid)) #in ADU

     #im[1] contains a row with factors nanomaggies per counts for each column
     calib_image = np.asarray([_im[1].data.tolist()]*np.shape(_im[0].data)[0])
@@ -354,55 +513,21 @@ def elaborate_SDSS(_im, _flt, _run, _camcol, _field):

     return sky_subtracted_original_image_hdu, weight_image_hdu

-
-
-
-def SDSS_sky(_hdu, method="linear"):
-    '''
-    The function converts the second entry of a hdu SDSS frame (sky) to a good
-    sky array.
-
-    *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
-    :param _hdu: HDUList from SDSS database
-    :type _hdu: astropy.io.fits.hdu.hdulist.HDUList
-
-    :param method: Method of interpolation. In can be "linear" (default)
-    :type method: str
-    :default: "linear"
-
-    *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
-    :return: Sky array, in ADUs.
-    :rtype: numpy.ndarray
-    '''
-
-    from scipy import interpolate
-
-    sky, x, y = _hdu.data[0]
-    xold = np.arange(np.shape(sky)[0])
-    yold = np.arange(np.shape(sky)[1])
-
-    # `interp2d` is deprecated in SciPy 1.10 and will be removed in SciPy 1.14.0
-    # NOTE: `interp2d` has x and y inverted but doesn't need a `meshgrid` to produce the output
-    # This is the code if `interp2d` is still preferred despite of its deprecation
-    # tck = interpolate.interp2d(yold, xold, sky, kind=method)
-    # return tck(x, y)
-
-    tck = interpolate.RegularGridInterpolator((xold,yold), sky, method=method, bounds_error=False, fill_value = None)
-
-    # need to invert x and in y to produce the meshgrid with the right shape
-    x_grid, y_grid = np.meshgrid(y, x, indexing='ij')
-
-    return tck((x_grid, y_grid))
-
-
-def generate_FOV_grid(_center, _fov, _step = None, circle=True, wcs = None):
+def generate_FOV_grid(_center, _fov, _step, circle=True, wcs = None):
     '''
     The function generates a grid of points, each distant :_step: to each other.
-    The construction follows a recursive hexagon ring, that becomes bigger and
-    bigger until it encompass the whole FOV. It takes advantage of equilateral
-    triangles properties, where the height to the top vertex is sqrt(3)/2 times
-    the side of the triangle.
+    The construction follows a recursive hexagonal ring that becomes bigger and
+    bigger until it encompasses the whole FOV. It takes advantage of the class
+    `~astropy.visualization.wcsaxes.SphericalCircle`, which creates a circle that
+    is formed of all the points that are within a certain angle of the central
+    coordinates on a sphere. Each hexagonal ring is built on top of these circles.
+
+    If :_step: is bigger than half of the FOV size, then the grid would only have
+    the central point. This can cause the FOV to not be fully covered by the
+    template tiles. To fix this, in this case the grid will be just the central
+    point plus an octagon with a diagonal equal to half of the diagonal of the
+    FOV. The octagon is chosen to account for any rotation of the FOV.

     If :circle: is True (default), the grid will be trimmed to a circle with radius
     equal to :_fov:

     If :wcs: is provided, the points outside of it will be removed.

@@ -411,26 +536,26 @@
     *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
     :param _center: Coordinates of the center point of the grid
-    :type _center: astropy.coordinates.sky_coordinate.SkyCoord
+    :type _center: `~astropy.coordinates.SkyCoord`

     :param _fov: size of the FOV to tile with the grid. This assumes a square
                  FOV. This quantity effectively translates to the length of the
                  apothem of outermost hexagon, which will be equal to half of
                  the diagonal of the square FOV.
-    :type _fov: astropy.units.quantity.Quantity
+    :type _fov: `~astropy.units.Quantity` ['angle']

     :param _step: distance of each point in the grid
-    :type _fov: astropy.units.quantity.Quantity
+    :type _step: `~astropy.units.Quantity` ['angle']

     :param circle: If True, trims the grid to a circle with radius :_fov:
-    :type: bool
+    :type circle: bool
     :default: True

     :param wcs: WCS information. If provided the points outside of it will
                 be removed
-    :type wcs: astropy.wcs.wcs.WCS or None
-    :default: None
+    :type wcs: `~astropy.wcs.WCS` or None
+    :default wcs: None

     *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
     :return: List of points of the grid. Each element is an astropy SkyCoord
     :rtype: list

     '''

@@ -438,66 +563,48 @@
-    import math, itertools
-
-
-    if _step == None:
-        _step = _fov/3
+    from astropy.visualization.wcsaxes import SphericalCircle

     points = [[_center, 0*u.deg]]

-    # Assuming a square FOV of side A, its diagonal is A*sqrt(2). Therefore an
-    # hexagon will have to have a side of A*sqrt(2)/sqrt(3) in order to contain
-    # it all, given any rotation.
-
-    for ring in range(1, math.ceil(_fov/_step*np.sqrt(2)/np.sqrt(3))):
-        # Moving `ring*_step` E from center to start the new hexagon
-        p = SkyCoord(_center.ra+_step*ring/np.cos(_center.dec.rad), _center.dec)
-        points.append([p, p.separation(_center)])
-
-        # The following `for` loops are written in a fancy way. Since we don't need to store
-        # the iterator variable, and we just need to repeat the same operation N times, we
-        # can use `itertools.repeat`, which doesn't need to build and return a list, storing
-        # the index as well, like range, or xrange would do. This is effectively the fastest
-        # way to iterate N times in Python.
-        # A more readable and common way of writing this would be
-        # `for _ in range(N):`
-        # And can be written like this if preferred, with negligible loss in performance.
-        # The number N of points along each hexagon side is exactly the Nth ring, so in our
-        # case N=`ring`

-        # The most distant points from the center are the most important, so I store the
-        # distance to the center as well, to be able to sorted them later.
-
-        #`p` it's always the previous point to move from
-
-        # Turning 60 degrees and starting moving NW
-        for _ in itertools.repeat(None, ring):
-            p = SkyCoord(p.ra-_step/2/np.cos(p.dec.rad), p.dec+_step*np.sqrt(3)/2)
-            points.append([p, p.separation(_center)])
-        # Turning 120 degrees and starting moving W
-        for _ in itertools.repeat(None, ring):
-            p = SkyCoord(p.ra-_step/np.cos(p.dec.rad), p.dec)
-            points.append([p, p.separation(_center)])
-        # Turning 120 degrees and starting moving SW
-        for _ in itertools.repeat(None, ring):
-            p = SkyCoord(p.ra-_step/2/np.cos(p.dec.rad), p.dec-_step*np.sqrt(3)/2)
-            points.append([p, p.separation(_center)])
-        # Turning 120 degrees and starting moving SE
-        for _ in itertools.repeat(None, ring):
-            p = SkyCoord(p.ra+_step/2/np.cos(p.dec.rad), p.dec-_step*np.sqrt(3)/2)
-            points.append([p, p.separation(_center)])
-        # Turning 120 degrees and starting moving E
-        for _ in itertools.repeat(None, ring):
-            p = SkyCoord(p.ra+_step/np.cos(p.dec.rad), p.dec)
-            points.append([p, p.separation(_center)])
-        # Turning 120 degrees and starting moving NE
-        for _ in itertools.repeat(None, ring):
-            p = SkyCoord(p.ra+_step/2/np.cos(p.dec.rad), p.dec+_step*np.sqrt(3)/2)
+    # If the step is too big, we will take an octagonal ring
+    if _step > _fov/2:
+        vertexes = SphericalCircle(_center, radius = _fov*np.sqrt(2)/2, resolution=8).get_verts()
+        # :SphericalCircle: will produce a Polygon, so the first and the last
+        # point will coincide. Therefore we need to remove the last vertex.
+        for v in vertexes[:-1]:
+            p = SkyCoord(*v, unit=u.deg)
             points.append([p, p.separation(_center)])

+    else:
+        # For a square FOV of side A, the diagonal is A*sqrt(2). Therefore a
+        # hexagon will have to have a diagonal of A*sqrt(2)/sqrt(3) in order to contain
+        # it all, given any rotation. We add an extra ring just to be sure
+
+        for ring in range(1, math.ceil(_fov/_step*np.sqrt(2)/np.sqrt(3))+1):
+            current_hexagon = []
+            vertexes = SphericalCircle(_center, radius = _step*ring, resolution=6).get_verts()
+            for v in vertexes:
+                p = SkyCoord(*v, unit=(u.deg,u.deg))
+                current_hexagon.append([p, p.separation(_center)])
+            # Filling the sides of the hexagon with points
+            for side in range(6):
+                # We need to split the side on the nth hexagon
+                # in n segments
+                for i in range(1, ring):
+                    a = i / ring  # rescale 0 < i < n --> 0 < a < 1
+                    new_point = (1 - a) * current_hexagon[side][0].cartesian + a * current_hexagon[side+1][0].cartesian  # interpolate cartesian coordinates
+                    new_point = SkyCoord(new_point)
+                    current_hexagon.append([new_point, new_point.separation(_center)])
+            # :SphericalCircle: will produce a Polygon, so the first and the last
+            # point will coincide. Therefore we need to remove the last vertex of
+            # the current hexagon.
+            current_hexagon.pop(6)
+            points = points + current_hexagon
+
     if circle:
-        points = [p for p in points if p[1] <= _fov/2*np.sqrt(2)]
+        # Adding 5 arcsec to give an extra buffer
+        points = [p for p in points if p[1] <= (_fov/2*np.sqrt(2))+5*u.arcsec]

     if wcs:
         points = [p for p in points if p[0].contained_by(wcs)]
@@ -512,69 +619,108 @@

     return [p[0] for p in points]


-def reproject_and_combine(_imglist, _weightlist):#, _coord):
+def reproject_and_combine(_imglist, _weightlist, _instrument):
+    '''
+    The name of the function is quite self-explanatory.
+    It uses the `reproject` package to match the resolution
+    of :_instrument:, and to merge all the tiles
+    into the final template. It also produces a weight
It produces also a weight + image. + + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :param _imglist: List of hdus with the images to merge + :type _imglist: list + + :param _weightlist: List of hdus with the weights of the images + to merge + :type _weightlist: list + + :param _instrument: Name of the instrument. The string needs to + be a valid key in :RESOLUTION: + :type _instrument: string + + + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :return: Image data, Weight data, Header with optimal WCS + :rtype: list + ''' from reproject.mosaicking import find_optimal_celestial_wcs from reproject import reproject_interp from reproject.mosaicking import reproject_and_coadd - wcs_out, shape_out = find_optimal_celestial_wcs(_imglist, resolution=RESOLUTION) + wcs_out, shape_out = find_optimal_celestial_wcs(_imglist, resolution=RESOLUTION[_instrument]) array, footprint = reproject_and_coadd(_imglist, wcs_out, shape_out=shape_out, reproject_function=reproject_interp, input_weights = _weightlist, combine_function='mean') return array, footprint, wcs_out.to_header() - # import numpy as np - # import matplotlib.pyplot as plt - - # plt.figure(figsize=(10, 8)) - # ax1 = plt.subplot(1, 2, 1, projection=wcs_out) - # im1 = ax1.imshow(array, origin='lower', vmin=600, vmax=800) - # ax1.set_title('Mosaic') - # ax2 = plt.subplot(1, 2, 2, projection=wcs_out) - # im2 = ax2.imshow(footprint, origin='lower') - # ax2.set_title('Footprint') - # ax1.plot([49.13674189,49.13062658,48.53523662,48.54543162]*u.deg, [ 41.39245501, 41.83609281,41.82997431, 41.38637844]*u.deg, color = 'r', lw=3, transform=ax1.get_transform("world")) - # ax2.plot([49.13674189,49.13062658,48.53523662,48.54543162]*u.deg, [ 41.39245501, 41.83609281,41.82997431, 41.38637844]*u.deg, color = 'r', lw=3, transform=ax1.get_transform("world")) - - - #grid = generate_FOV_grid(_coord, FOV) - - # for p in grid: - # ax1.plot(p.ra, p.dec, marker='o', color='r', ms=10, transform=ax1.get_transform("world")) - # ax2.plot(p.ra, p.dec, marker='o', color='r', ms=10, transform=ax2.get_transform("world")) - - # plt.show() - - # pf.writeto('test_combine.fits', array, wcs_out.to_header(), overwrite=True) - # pf.writeto('test_footprint.fits', footprint, wcs_out.to_header(), overwrite=True) - - -# lco_fits = pf.open('/Users/giacomoterreran/lco/data/snexdata_target7856/tfn1m014-fa20-20231005-0153-e91.fits') -# wcs = WCS(lco_fits[0].header) -# footprint = wcs.calc_footprint() -# target_coord = SkyCoord(lco_fits[0].header['RA'], lco_fits[0].header['DEC'], unit=(u.hourangle, u.deg)) +def trim_nan(_hdu): + ''' + When downloading an image from PS1, the image is padded with Nan. + This function, trim the image, removing the np.nan array, and preserving + the WCS. -# import numpy as np -# import matplotlib.pyplot as plt + NOTE: the flux of the full skycell stack images are scaled. Astropy + rescale the data automatically as soon as the data attribute of the + hdu is accessed. See https://docs.astropy.org/en/stable/io/fits/usage/image.html + for more details. 
-# im = pf.open('/Users/giacomoterreran/lco/data/data/extdata/O4/PS1/SN2023rbk_g.fits') + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :param _hdu: HUDList containing the data to trim + :type _hdu: `~astropy.io.fits.HDUList` + + *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* + :return: HDUList with edges trimmed + :rtype: `~astropy.io.fits.HDUList` -# plt.figure(figsize=(10, 8)) -# ax1 = plt.subplot(1, 1, 1, projection=WCS(im[0])) -# im1 = ax1.imshow(im[0].data, origin='lower', vmin=600, vmax=800) -# ax1.plot([49.13674189,49.13062658,48.53523662,48.54543162]*u.deg, [ 41.39245501, 41.83609281,41.82997431, 41.38637844]*u.deg, color = 'r', lw=3, transform=ax1.get_transform("world")) + ''' + from astropy.nddata import Cutout2D -# grid = generate_FOV_grid(target_coord, FOV) + # Getting all the columns with only np.nan in them + good_cols = np.all(np.isnan(_hdu.data),axis=1) -# for p in grid: -# ax1.plot(p.ra, p.dec, marker='o', color='r', ms=10, transform=ax1.get_transform("world")) + # Retrieving the first and the last index of good_cols == False, + # i.e. the first and last columns containing (at least partially) + # real data + for i in range(len(good_cols)): + if not good_cols[i]: + # Adding 10 pixel padding to account for edge artifact + first_col = i+10 + break + + for i in range(len(good_cols)-1,-1,-1): + if not good_cols[i]: + # We want to include the index, so we add 1, butting the + # edge right after the column. Subtracting 10 pixel padding + # to account for edge artifact. + last_col = i+1-10 + break + + + # Doing the same with the rows + good_rows = np.all(np.isnan(_hdu.data),axis=0) + for i in range(len(good_rows)): + if not good_rows[i]: + first_row = i+10 + break + + for i in range(len(good_rows)-1,-1,-1): + if not good_rows[i]: + last_row = i+1-10 + break + + size_x = last_col-first_col + size_y = last_row-first_row + cen_x = math.floor((last_col+first_col)/2.) + cen_y = math.floor((last_row+first_row)/2.) + + # No idea why x and y are inverted between the center and the size + # ¯\_(ツ)_/¯ + cut = Cutout2D(_hdu.data, (cen_y,cen_x), (size_x,size_y), wcs=WCS(_hdu.header)) -# plt.show() -# exit() + out_hdu = pf.PrimaryHDU() + out_hdu.data = cut.data + out_hdu.header = _hdu.header.copy() + out_hdu.header.update(cut.wcs.to_header()) -target_coord = SkyCoord(13.576921, -40.392550, unit=(u.deg, u.deg)) -s = survey_request('at2023pcw',target_coord , 'gri') -#s = survey_request(lco_fits[0].header['OBJECT'], target_coord, 'gri') -#s.search_for_SDSS() -s.search_for_PS1() -#s.search_for_DECam() + return out_hdu From 9d2eb93016fb4266d846946f608f79adae0e78fb Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Tue, 14 May 2024 09:03:07 -0700 Subject: [PATCH 10/21] Full PanSTARRS and SDSS template retrieval complete. --- gw/tests.py | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 gw/tests.py diff --git a/gw/tests.py b/gw/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/gw/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. 
From 3b5d01020f7c8c9291e10c8e117aeb43a21893b5 Mon Sep 17 00:00:00 2001
From: Giacomo Terreran
Date: Mon, 20 May 2024 12:45:32 -0700
Subject: [PATCH 11/21] Restructured scripts

---
 gw/hooks.py | 15 +-
 gw/survey_queries.py | 726 --------------------------
 gw/survey_queries/DECam_search.py | 92 ++++
 gw/survey_queries/PanSTARRS_search.py | 217 ++++++++
 gw/survey_queries/SDSS_search.py | 179 +++++++
 gw/survey_queries/Skymapper_search.py | 50 ++
 gw/survey_queries/__init__.py | 144 +++++
 gw/survey_queries/query.py | 180 +++++++
 8 files changed, 872 insertions(+), 731 deletions(-)
 delete mode 100644 gw/survey_queries.py
 create mode 100644 gw/survey_queries/DECam_search.py
 create mode 100644 gw/survey_queries/PanSTARRS_search.py
 create mode 100644 gw/survey_queries/SDSS_search.py
 create mode 100644 gw/survey_queries/Skymapper_search.py
 create mode 100644 gw/survey_queries/__init__.py
 create mode 100644 gw/survey_queries/query.py

diff --git a/gw/hooks.py b/gw/hooks.py
index 31bb76be..8d322910 100644
--- a/gw/hooks.py
+++ b/gw/hooks.py
@@ -9,6 +9,9 @@
 import logging
 from django.conf import settings
 import survey_queries
+from astropy.coordinates import SkyCoord
+from astropy import units as u
+from survey_queries.query import template_query

 logger = logging.getLogger(__name__)
@@ -102,12 +105,14 @@ def ingest_gw_galaxy_into_snex1(target_id, event_id, wrapped_session=None):
     snex2_target = Target.objects.get(id=target_id)
     ra0 = snex2_target.ra
     dec0 = snex2_target.dec
+    objname = snex2_target.objname

-    templ = survey_queries.survey_request(ra0, dec0, 'gri')
-    templ.search_for_PS1_urls()
-    templ.search_for_Skymapper_urls()
-    templ.search_for_DECam_urls()
-    templ.fetch_urls()
+    # It is unclear whether ra0/dec0 already arrive as a SkyCoord; if they do, remove the following line
+    target_coord = SkyCoord(ra0, dec0, unit=(u.deg, u.deg))
+
+    templ = template_query(objname, target_coord, 'g', ['sinistro', 'muscat'])
+    templ.search_for_PS1()
+    templ.search_for_SDSS()

     db_session.add(o4_galaxies(targetid=target_id, event_id=event_id, ra0=ra0, dec0=dec0, **templ.templates_paths))

diff --git a/gw/survey_queries.py b/gw/survey_queries.py
deleted file mode 100644
index 0f701c0f..00000000
--- a/gw/survey_queries.py
+++ /dev/null
@@ -1,726 +0,0 @@
-# -*- coding: utf-8 -*-
-"""survey_queries.py
-
-"""
-
-from astropy.table import Table
-from astropy import units as u
-import astropy.io.fits as pf
-from astropy.time import Time
-from astroquery.sdss import SDSS
-from astropy.coordinates import SkyCoord
-import astropy.units as u
-import os, math
-import numpy as np
-from astropy.wcs import WCS
-
-# Folder definitions
-OUT_FOLDER = os.getenv('GWTEMPLATES_FOLDER')
-#OUT_FOLDER = '/supernova/data/extdata/GW_templates/O4'
-
-# PS1 urls
-PS1_TAB = 'https://ps1images.stsci.edu/cgi-bin/ps1filenames.py'
-
-# PS1 provides a cutout API. We probably prefer to use the whole
-# skycell, but we can easily change that with this bool variable
-SKYCELL = True
-if SKYCELL:
-    PS1_CUT = 'https://ps1images.stsci.edu/'
-else:
-    PS1_CUT = 'https://ps1images.stsci.edu/cgi-bin/fitscut.cgi'
-
-# Specifications for different instruments
-# The FOV here is thought as the diameter of a circle that would circumscribe the LCO image.
-# This is effectively bigger than the actual FOV of each instrument, but it will guarantee -# that the full FOV is covered given any rotation -INSTRUMENT_LIST = ['sinistro', 'muscat', 'qhy'] -#FOV = {'sinistro': 26.5*np.sqrt(2)*u.arcmin, 'muscat': 9.1*np.sqrt(2)*u.arcmin, 'qhy': np.sqrt(1.9**2 + 1.2**2)*u.deg} -# The FOV of 'qhy' is rectangular, se we calculate the side of square having the same diagonal. -FOV = {'sinistro': 26.5*u.arcmin, 'muscat': 9.1*u.arcmin, 'qhy': np.sqrt((1.9**2 + 1.2**2)/2)*u.deg} -RESOLUTION = {'sinistro':0.389*u.arcsec, 'muscat': 0.27*u.arcsec, 'qhy':0.74*u.arcsec} - -SURVEYS_SKYCELL_SIZES = {'PS1': 0.4*u.deg, 'SDSS': 10*u.arcmin} - - - -class survey_request: - ''' - This class will query the surveys looking for the optical templates. - Each function is associated with a survey. First it will check if - the coordinates are in the footprint of that survey. Each function - will return True or False accordingly. In addition, if the coordinates - are indeed in the footprint, then the function will store the hdu of - the template in the 'hdu' attribute. - - ''' - - def __init__(self, _obj, _coord, _filters, _inst): - ''' - Initializing the class with the name of the object :_obj:, - the coordinates :_coord: and the filters :_filters: to search. - The template images will be resampled to match the resolution - of the instrument (or list of instruments) provided by :_inst:. - The template FOV will also be as big as to include the whole - FOV of that instrument. - The empty dictionary :templates_paths: is also created. - - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :_obj: Name of the object, mainly for file naming purposes - :type _obj: string - - :_coord: coordinates of the object, corresponding to the center - of the search - :type _coord: astropy.coordinates.sky_coordinate.SkyCoord - - :_filters: filters to search. If searching for multiple filters - use a single long string, e.g. 'gri' - :type _filters: string - - :_inst: instrument name, or list of instrument names. In order - to be valid, the name will be checked against the global - variable :INSTRUMENT_LIST:, defined above. - :type _inst: string or list - - ''' - self.obj = _obj - self.coord = _coord - self.filters = _filters - - # Making the :instruments: list and checking the instruments exist. - if isinstance(_inst, str): - if _inst == 'all': - self.instruments = INSTRUMENT_LIST - else: - self.instruments = [_inst] - else: - self.instruments = _inst - for i in self.instruments: - if i not in INSTRUMENT_LIST: - raise ValueError(f'Instrument `{i}` does not appear to be currently supported. If this is a new instrument, you will need to included it in the INSTRUMENT_LIST, as well as update the FOV and RESOLUTION dictionaries.') - - # Sorting the instruments in decreasing FOV size. - self.instruments.sort(key = lambda x: FOV[x], reverse=True) - - self.templates_paths = {} - - - # Defining a decorator. Because decorators are cool - def set_survey(_survey): - ''' - This decorator set the name of survey for the following function, through the - :_survey: variable, and initialize the :self.templates_paths: dictionary with - a '--' string for that specific survey. The idea is that by initializing the - dictionary, there will be a record that the search has been performed. - Otherwise the dictionary will not have any entry for that survey. 
- - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :_survey: Name of the survey - :type _center: string - - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :return: Decorated function - :rtype: function - - ''' - def decorate(function): - def initialize_survey(self): - # Initializing all the filters - for flt in self.filters: - for inst in self.instruments: - self.templates_paths[f'{inst}_{_survey}_{flt}'] = '--' - print(f'Searching for {_survey} templates.\n') - return function(self, _survey) - return initialize_survey - return decorate - - -####### ---PS1--- ####### - @set_survey('PS1') - def search_for_PS1(self, survey): - ''' - This function searches and downloads PS1 templates. - Together with the data, it will fetch also the weight images (i.e. 1/variance) - and the mask images. - - ''' - - # Instruments are sorted by FOV size. - # :self.instruments[0]: is the instrument with the biggest FOV - # and therefore requires the biggest template. We start creating - # this, and instead of repeating the procedure, we just scale down - # this first template to match the smaller requirements. - inst = self.instruments[0] - - # Generate grid of points covering the FOV - grid = generate_FOV_grid(_center = self.coord, _fov = FOV[inst], _step=SURVEYS_SKYCELL_SIZES[survey]) - - # NOTE: wt are actually variance maps - data_lists = {'stack':{}, 'mask':{}, 'wt':{}} - - while True: - # Initially :grid[0]: will be the center of the field. - # After each iteration, the :grid: array is trimmed down - # and its first element will be the point on the grid - # most distant from the center - - # Creating the url to query the PS1 database. - # NOTE: PS1 needs the size in pixels, considering 0.25 arcsec/pixel. - # This first query returns a Table with a list of files - print(f'Querying the PS1 database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.') - tableurl = f'{PS1_TAB}?ra={grid[0].ra.deg}&dec={grid[0].dec.deg}&size={FOV[inst].to(u.arcsec).value/0.25}&format=fits&filters={self.filters}&type=stack,stack.wt,stack.mask' - table = Table.read(tableurl, format='ascii') - - # If the table is empty, the images are not in PS1 - if len(table) == 0: - print(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the PS1 footprint.\n') - return - - print('Coordinates in PS1 footprint.') - for tab in table: - flt = tab['filter'] - filename = tab['filename'] - filetype = tab['type'].split('.')[-1] - - # Initializing dictionaries - if flt not in data_lists[filetype]: - data_lists[filetype][flt]=[] - data_lists[filetype][flt]=[] - data_lists[filetype][flt]=[] - - if SKYCELL: - url = f'{PS1_CUT}{filename}' - else: - url = f'{PS1_CUT}?ra={grid[0].ra.deg}&dec={grid[0].dec.deg}&size=6000&format=fits&red={filename}' - - print(f'Retrieving file {filename}') - with pf.open(url) as hdulist: - # The skycell headers need some adjustments - if SKYCELL: - #hdulist[1].header['TIMESYS'] = 'TAI' - hdulist[1].header['RADESYSa'] = 'FK5' - hdulist[1].header['DATE-OBS'] = 'T'.join(Time(hdulist[1].header['MJD-OBS'], format='mjd', scale='utc').iso.split()) - hdulist[1].header['PC1_1'] = hdulist[1].header['PC001001'] - hdulist[1].header['PC1_2'] = hdulist[1].header['PC001002'] - hdulist[1].header['PC2_1'] = hdulist[1].header['PC002001'] - hdulist[1].header['PC2_2'] = hdulist[1].header['PC002002'] - del hdulist[1].header['PC001001'] - del hdulist[1].header['PC001002'] - del hdulist[1].header['PC002001'] - del hdulist[1].header['PC002002'] - - # The algorithm for decompressing fits file data - # doesn't work on PS1 skycells since 
astropy version 5.3 - # The easiest way to fix this, is to remove the - # 'ZBLANK' key (assuming it's equal to 'BLANK') - if 'ZBLANK' in hdulist[1].header: - del hdulist[1].header['ZBLANK'] - - # The image flux scaling is also non-standard for the stack images - if filetype != 'mask': - hdulist[1].data = hdulist[1].header['BOFFSET'] + hdulist[1].header['BSOFTEN'] * (10**(0.4*hdulist[1].data) - 10**(-0.4*hdulist[1].data)) - del hdulist[1].header['BLANK'] - - # Getting rid of the edge nans - fixed_hdu = trim_nan(hdulist[1]) - else: - fixed_hdu = trim_nan(hdulist[0]) - - del fixed_hdu.header['RADESYS'] - - # Since the weight images are actually variance maps, we need - # to take the reciprocal of it - if filetype == 'wt': - fixed_hdu.data = 1/fixed_hdu.data - - data_lists[filetype][flt].append(fixed_hdu) - - for skycell in data_lists['stack'][self.filters[0]]: - #trimming down the grid, leaving the points currently not in any skycell - grid = [p for p in grid if not p.contained_by(WCS(skycell.header))] - - if len(grid) == 0: - break - - # We now trim the list of skycells according to each instrument FOV - # For the first instrument this step should be redundant since we - # selected the skycell in order to tile the FOV already. - for inst in self.instruments: - new_grid = generate_FOV_grid(_center = self.coord, _fov = FOV[inst], _step=SURVEYS_SKYCELL_SIZES[survey]) - for flt in data_lists['stack']: - # Since enumerate() returns a generator and generators can't be reversed, - # we need to convert it to a list first - for skycell_idx, skycell in reversed(list(enumerate(data_lists['stack'][flt]))): - contained = False - for p in new_grid: - if p.contained_by(WCS(skycell.header)): - contained = True - break - # If no point of the grid is contained by the skycell, - # drop it from all the data lists - if not contained: - for filetype in data_lists: - data_lists[filetype][flt].pop(skycell_idx) - - print(f'Reprojecting and combining skycells of filer {flt} for {inst}.') - array, footprint, header = reproject_and_combine(data_lists['stack'][flt], data_lists['wt'][flt], inst) - - # Keeping track of the single skycells that are being combined. 
- header['NSKYCELL'] = (len(data_lists['stack'][flt]), 'Number of SDSS skycells merged') - for i,im in enumerate(data_lists['stack'][flt], start=1): - header[f'STK_TY{i:>02}'] = (im.header['STK_TYPE'], f'type of stack {i}') - header[f'STK_ID{i:>02}'] = (im.header['STK_ID'], f'type of stack {i}') - header[f'SKYCEL{i:>02}'] = (im.header['SKYCELL'], f'type of stack {i}') - header[f'TES_ID{i:>02}'] = (im.header['TESS_ID'], f'type of stack {i}') - - # Populating the the header with the keys needed by the pipeline - header['RA'] = self.coord.ra.deg - header['DEC'] = self.coord.dec.deg - header['CAT-RA'] = self.coord.ra.deg - header['CAT-DEC'] = self.coord.dec.deg - header['MJD-OBS'] = (np.mean([im.header['MJD-OBS'] for im in data_lists['stack'][flt]]), 'Average MJD') - header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split()) - header['DAY-OBS'] = header['DATE-OBS'].split('T')[0].replace('-', '') - header['TELESCOP'] = 'PS1' - header['INSTRUME'] = 'GPC1' - header['OBJECT'] = self.obj - header['FILTER'] = flt+'p' # NOTE: this works only with griz - header['AIRMASS'] = 1 - header['WCSERR'] = 0 - header['PIXSCALE'] = (RESOLUTION[inst].value, 'arcsec/pixel') - header['BUNIT'] = ('counts', 'ADUs') - - # Writing out the final combined template - out_name = f'{OUT_FOLDER}/{survey}/{inst}_{self.obj}_{flt}' - self.templates_paths[f'{inst}_{survey}_{flt}'] = out_name+'.fits' - pf.writeto(out_name+'.fits', array, header, overwrite=True) - - # Hotpants needs the variance, so we need to write out 1/footprint - header['BUNIT'] = ('counts2', 'ADUs^2') - pf.writeto(out_name+'.var.fits', 1/footprint, header, overwrite=True) - - - - -####### ---SDSS--- ####### - @set_survey('SDSS') - def search_for_SDSS(self, survey): - ''' - This function searches and downloads SDSS templates. - Together with the data, it will fetch also the variance images - - ''' - - # Instruments are sorted by FOV size. - # :self.instruments[0]: is the instrument with the biggest FOV - # and therefore requires the biggest template. We start creating - # this, and instead of repeating the procedure, we just scale down - # this first template to match the smaller requirements. - inst = self.instruments[0] - - # Generate grid of points covering the FOV - grid = generate_FOV_grid(_center = self.coord, _fov = FOV[inst], _step=SURVEYS_SKYCELL_SIZES[survey]) - - all_pointings = [] - - img_data_list = {} - weight_data_list = {} - - for f,flt in enumerate(self.filters): - - img_data_list[flt] = [] - weight_data_list[flt] = [] - - # For the first filter, we need to collect the references for all the required SDSS fields - # We don't need to repeat the query for the following filters - # The SDSS fields references are stored in :all_pointings: - if f == 0: - while True: - # SDSS queries radius must be less than 3.0 arcmin. - # Initially :grid[0]: will be the center of the field. 
- # After each iteration, the :grid: array is trimmed down and its first element will be - # the point on the grid most distant from the center - print(f'Querying the SDSS database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.') - xid = SDSS.query_region(grid[0], radius=3*u.arcmin, spectro=False) - if xid is None: - print(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the SDSS footprint.\n') - return - - print('Coordinates in SDSS footprint.') - new_pointings = [list(el) for el in np.unique(xid['run','camcol','field']) if list(el) not in all_pointings] - - all_pointings = all_pointings + new_pointings - - for run, camcol, field in new_pointings: - #fetching the fits as an astropy hdu - print(f'Retrieving file with run={run}, camcol={camcol}, field={field} and filter {flt}.') - im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0] - - sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field) - img_data_list[flt].append(sky_subtracted_original_image_hdu) - weight_data_list[flt].append(weight_image_hdu) - - - for tile in img_data_list[flt]: - grid = [p for p in grid if not p.contained_by(WCS(tile.header))] - - if len(grid) == 0: - break - - else: - #Note that we changed the list to all_pointings now that we have them all - for run, camcol, field in all_pointings: - #fetching the fits as an astropy hdu - im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0] - - sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field) - img_data_list[flt].append(sky_subtracted_original_image_hdu) - weight_data_list[flt].append(weight_image_hdu) - - - # We now trim the list of skycells according to each instrument FOV - # For the first instrument this step should be redundant since we - # selected the skycell in order to tile the FOV already. 
- for inst in self.instruments: - new_grid = generate_FOV_grid(_center = self.coord, _fov = FOV[inst], _step=SURVEYS_SKYCELL_SIZES[survey]) - for flt in img_data_list: - # Since enumerate() returns a generator and generators can't be reversed, - # we need to convert it to a list first - for skycell_idx, skycell in reversed(list(enumerate(img_data_list[flt]))): - contained = False - for p in new_grid: - if p.contained_by(WCS(skycell.header)): - contained = True - break - # If no point of the grid is contained by the skycell, - # drop it from all the data lists - if not contained: - img_data_list[flt].pop(skycell_idx) - weight_data_list[flt].pop(skycell_idx) - - print(f'Reprojecting and combining skycells of filer {flt} for {inst}.') - array, footprint, header = reproject_and_combine(img_data_list[flt], weight_data_list[flt], inst) - - - - header['NTILES'] = (len(img_data_list[flt]), 'Number of SDSS tiles merged') - for i,im in enumerate(img_data_list[flt], start=1): - header[f'RUN{i:>03}'] = (im.header['RUN'], f'Run number of tile {i}') - header[f'CAMCO{i:>03}'] = (im.header['CAMCOL'], f'Column in the imaging camera of tile {i}') - header[f'FIELD{i:>03}'] = (im.header['FIELD'], f'Field number of tile {i}') - header[f'MJD{i:>03}'] = (im.header['MJD-OBS'], f'MJD of tile {i}') - - header['RA'] = self.coord.ra.deg - header['DEC'] = self.coord.dec.deg - header['CAT-RA'] = self.coord.ra.deg - header['CAT-DEC'] = self.coord.dec.deg - header['MJD-OBS'] = (np.mean([im.header['MJD-OBS'] for im in img_data_list[flt]]), 'Average MJD') - header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split()) - header['DAY-OBS'] = header['DATE-OBS'].split('T')[0].split('T')[0].replace('-', '') - header['TELESCOP'] = '2.5m' - header['INSTRUME'] = 'SDSS' - header['OBJECT'] = self.obj - header['FILTER'] = flt+'p' #note, this works only with griz - header['AIRMASS'] = 1 - header['WCSERR'] = 0 - header['PIXSCALE'] = (RESOLUTION[inst].value, 'arcsec/pixel') - header['BUNIT'] = ('counts', 'ADUs') - - out_name = f'{OUT_FOLDER}/{survey}/{inst}_{self.obj}_{flt}' - self.templates_paths[f'{inst}_{survey}_{flt}'] = out_name+'.fits' - pf.writeto(out_name+'.fits', array, header, overwrite=True) - - #hotpants needs the variance, so we need the write out 1/footprint - header['BUNIT'] = ('counts2', 'ADUs^2') - pf.writeto(out_name+'.var.fits', 1/footprint, header, overwrite=True) - - - -def elaborate_SDSS(_im, _flt, _run, _camcol, _field): - ''' - SDSS images require a little bit of manipulation. - This function will produce a ready to be used image - as well as its corresponding weight image. 
- - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :param _im: SDSS image HDUList - :type _im: `~astropy.io.fits.HDUList` - - :param _flt: SDSS filter - :type _flt: string - - :param _run: Run number - :type _run: int - - :param _camcol: Column in the imaging camera - :type _camcol: int - - :param _field: Field number - :type _field: int - - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :return: HDU of the image, HDU of the weight image - :rtype: list - - ''' - - from scipy import interpolate - - header = _im[0].header - - #Although we do not write out each single tile, this edits in the header will - #make it easier to keep track of each tile in the merged fits file - header['RADESYSa'] = header['RADECSYS'] - del header['RADECSYS'] - header['MJD-OBS'] = Time(f"{header['DATE-OBS']} {header['TAIHMS']}", format='iso', scale='tai').mjd - header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split()) - #For some reason the field info is not stored in the header. There is a "FRAME" key, but it's different - header['FIELD'] = _field - - #retrieve gain and darkVariance from another online table - q = SDSS.query_sql(f'SELECT DISTINCT gain_{_flt}, darkVariance_{_flt} FROM field WHERE run={_run} AND camcol={_camcol}') - gain = q[f'gain_{_flt}'][0] - darkVariance = q[f'darkVariance_{_flt}'][0] - - #im[2] contains the sky - sky, x, y = _im[2].data[0] - x_old = np.arange(np.shape(sky)[0]) - y_old = np.arange(np.shape(sky)[1]) - - # `interp2d` is deprecated in SciPy 1.10 and will be removed in SciPy 1.14.0 - # se We are using `interpolate.RegularGridInterpolator` instead - # NOTE: `interp2d` has x and y inverted but doesn't need a `meshgrid` to produce the output - # This is the code if `interp2d` is still preferred despite of its deprecation - # tck = interpolate.interp2d(y_old, x_old, sky, kind=method) - # sky_image = tck(x, y) - - tck = interpolate.RegularGridInterpolator((x_old,y_old), sky, method="linear", bounds_error=False, fill_value = None) - - # need to invert x and in y to produce the meshgrid with the right shape - x_grid, y_grid = np.meshgrid(y, x, indexing='ij') - - sky_image = tck((x_grid, y_grid)) #in ADU - - #im[1] contains a row with factors nanomaggies per counts for each column - calib_image = np.asarray([_im[1].data.tolist()]*np.shape(_im[0].data)[0]) - - sky_subtracted_original_image = _im[0].data / calib_image #in ADU - - variance_image = ((_im[0].data / calib_image + sky_image) / gain + darkVariance)# in ADU * calib_image **2 - - sky_subtracted_original_image_hdu = pf.PrimaryHDU(sky_subtracted_original_image, header=header) - #to reproject and combine, we just need the weight, not the variance - weight_image_hdu = pf.PrimaryHDU(1/variance_image, header=header) - - return sky_subtracted_original_image_hdu, weight_image_hdu - - -def generate_FOV_grid(_center, _fov, _step, circle=True, wcs = None): - ''' - The function generates a grid of points, each distant :_step: to each other. - The construction follows a recursive hexagonal ring, that becomes bigger and - bigger until it encompass the whole FOV. It takes advantage of the class - `~astropy.visualization.wcsaxes.SphericalCircle`, which creates a circle that - is formed of all the points that are within a certain angle of the central - coordinates on a sphere. Each hexagonal ring is built on top of these circles. - - If :_step: is bigger than half of the fov size, then the grid would only have - the central point. This can cause the FOV to not be fully covered by the - template tiles. 
To fix this, in this case the grid will be just the central - point plus an octagon with a diagonal equal to half of the diagonal of the - FOV. The octagon is chosen to account for any rotation of the FOV. - - If :circle: is True (default), the grid will trimmed to a circle with radius - equal to :_fov: - - If :wcs: is provided, the points outside of the provided WCS are removed. - - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :param _center: Coordinates of the center point of the grid - :type _center: `~astropy.units.Quantity` ['angle'] - - :param _fov: size of the FOV to tile with the grid. This assumes a square - FOV. This quantity effectively translates to the length of the - apothem of outermost hexagon, which will be equal to half of - the diagonal of the square FOV. - :type _fov: `~astropy.units.Quantity` ['angle'] - - :param _step: distance of each point in the grid. If a value is not - provided, it will default to half :_fov: - :type _step: `~astropy.units.Quantity` ['angle'] - - :param circle: If True, trims the grid to a circle with radius :_fov: - :type circle: bool - :default: True - - :param wcs: WCS information. If provided the points outside of it will - be removed - :type wcs: `~astropy.wcs.WCS` or None - :default wcs: None - - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :return: List of points of the grid. Each element is a astropy SkyCoord - :rtype: list - - ''' - - from astropy.visualization.wcsaxes import SphericalCircle - - points = [[_center, 0*u.deg]] - - # If the the step is too big, we will take an octagonal ring - if _step > _fov/2: - vertexes = SphericalCircle(_center, radius = _fov*np.sqrt(2)/2, resolution=8).get_verts() - # :SphericalCircle: will produce a Polygon, so the first and the last - # point will coincide. Therefore we need to remove the last vertex. - for v in vertexes[:-1]: - p = SkyCoord(*v, unit=u.deg) - points.append([p, p.separation(_center)]) - - else: - # A square FOV of side A, its diagonal is A*sqrt(2). Therefore an - # hexagon will have to have a diagonal of A*sqrt(2)/sqrt(3) in order to contain - # it all, given any rotation. We add an extra order just to be sure - - for ring in range(1, math.ceil(_fov/_step*np.sqrt(2)/np.sqrt(3))+1): - current_hexagon = [] - vertexes = SphericalCircle(_center, radius = _step*ring, resolution=6).get_verts() - for v in vertexes: - p = SkyCoord(*v, unit=(u.deg,u.deg)) - current_hexagon.append([p, p.separation(_center)]) - #Filling the sides of the hexagon with points - for side in range(6): - # We need to split the side on the nth hexagon - # in n segments - for i in range(1, ring): - a = i / ring # rescale 0 < i < n --> 0 < a < 1 - new_point = (1 - a) * current_hexagon[side][0].cartesian + a * current_hexagon[side+1][0].cartesian # interpolate cartesian coordinates - new_point = SkyCoord(new_point) - current_hexagon.append([new_point, new_point.separation(_center)]) - # :SphericalCircle: will produce a Polygon, so the first and the last - # point will coincide. Therefore we need to remove the last vertex of - # the current hexagon. 
- current_hexagon.pop(6) - points = points + current_hexagon - - if circle: - #adding 5 arcsec to give an extra buffer - points = [p for p in points if p[1] <= (_fov/2*np.sqrt(2))+5*u.arcsec] - - if wcs: - points = [p for p in points if p[0].contained_by(wcs)] - - # Sorting the list according to the distance, with the most distant points first - points.sort(key = lambda x: x[1], reverse=True) - - #Putting the center back at the beginning - points = [points.pop()] + points - - #Returning just the list of coordinates, without the distance - return [p[0] for p in points] - - -def reproject_and_combine(_imglist, _weightlist, _instrument): - ''' - The name of the function is quite self explanatory. - It uses the `reproject` package to match the resolution - of :_instrument:, and to merge all the tiles - into the final template. It produces also a weight - image. - - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :param _imglist: List of hdus with the images to merge - :type _imglist: list - - :param _weightlist: List of hdus with the weights of the images - to merge - :type _weightlist: list - - :param _instrument: Name of the instrument. The string needs to - be a valid key in :RESOLUTION: - :type _instrument: string - - - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :return: Image data, Weight data, Header with optimal WCS - :rtype: list - ''' - from reproject.mosaicking import find_optimal_celestial_wcs - from reproject import reproject_interp - from reproject.mosaicking import reproject_and_coadd - - wcs_out, shape_out = find_optimal_celestial_wcs(_imglist, resolution=RESOLUTION[_instrument]) - array, footprint = reproject_and_coadd(_imglist, wcs_out, shape_out=shape_out, reproject_function=reproject_interp, input_weights = _weightlist, combine_function='mean') - return array, footprint, wcs_out.to_header() - - -def trim_nan(_hdu): - ''' - When downloading an image from PS1, the image is padded with Nan. - This function, trim the image, removing the np.nan array, and preserving - the WCS. - - NOTE: the flux of the full skycell stack images are scaled. Astropy - rescale the data automatically as soon as the data attribute of the - hdu is accessed. See https://docs.astropy.org/en/stable/io/fits/usage/image.html - for more details. - - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :param _hdu: HUDList containing the data to trim - :type _hdu: `~astropy.io.fits.HDUList` - - *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* - :return: HDUList with edges trimmed - :rtype: `~astropy.io.fits.HDUList` - - ''' - - from astropy.nddata import Cutout2D - - # Getting all the columns with only np.nan in them - good_cols = np.all(np.isnan(_hdu.data),axis=1) - - # Retrieving the first and the last index of good_cols == False, - # i.e. the first and last columns containing (at least partially) - # real data - for i in range(len(good_cols)): - if not good_cols[i]: - # Adding 10 pixel padding to account for edge artifact - first_col = i+10 - break - - for i in range(len(good_cols)-1,-1,-1): - if not good_cols[i]: - # We want to include the index, so we add 1, butting the - # edge right after the column. Subtracting 10 pixel padding - # to account for edge artifact. - last_col = i+1-10 - break - - - # Doing the same with the rows - good_rows = np.all(np.isnan(_hdu.data),axis=0) - for i in range(len(good_rows)): - if not good_rows[i]: - first_row = i+10 - break - - for i in range(len(good_rows)-1,-1,-1): - if not good_rows[i]: - last_row = i+1-10 - break - - size_x = last_col-first_col - size_y = last_row-first_row - cen_x = math.floor((last_col+first_col)/2.) 
- cen_y = math.floor((last_row+first_row)/2.) - - # No idea why x and y are inverted between the center and the size - # ¯\_(ツ)_/¯ - cut = Cutout2D(_hdu.data, (cen_y,cen_x), (size_x,size_y), wcs=WCS(_hdu.header)) - - out_hdu = pf.PrimaryHDU() - out_hdu.data = cut.data - out_hdu.header = _hdu.header.copy() - out_hdu.header.update(cut.wcs.to_header()) - - return out_hdu diff --git a/gw/survey_queries/DECam_search.py b/gw/survey_queries/DECam_search.py new file mode 100644 index 00000000..11f4d397 --- /dev/null +++ b/gw/survey_queries/DECam_search.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +"""DECam_search.py + +""" + +import urllib.request +from numpy.core.defchararray import startswith +from astropy import units as u +from pyvo.dal import sia +#from astroquery.hips2fits import hips2fits +#from astropy import wcs as astropy_wcs +import astropy.io.fits as pf + +# List of available services at: +# https://datalab.noirlab.edu/docs/manual/UsingAstroDataLab/DataAccessInterfaces/SimpleImageAccessSIA/SimpleImageAccessSIA.html +#COLLECTION = 'coadd_all' +COLLECTION = 'ls_dr9' +COLLECTION = 'coadd/decaps_dr2' +COLLECTION = 'delve_dr2' +DECAM_SERVICE = f'https://datalab.noirlab.edu/sia/{COLLECTION}' + + +#url = 'https://alasky.cds.unistra.fr/hips-image-services/hips2fits#hips=CDS%2FP%2FDECaPS%2FDR2%2Fg&ra=119.51849999999999&dec=-27.298400000000004&fov=0.4&projection=TAN&format=fits' +#url = 'https://alasky.cds.unistra.fr/hips-image-services/hips2fits?hips=CDS%2FP%2FDECaPS%2FDR2%2Fg&width=1200&height=900&fov=0.4&projection=TAN&coordsys=icrs&rotation_angle=0.0&ra=119.51849999999999&dec=-27.298400000000004&format=fits' +#url = 'https://alasky.cds.unistra.fr/hips-image-services/hips2fits?hips=CDS%2FP%2FPanSTARRS%2FDR1%2Fg&width=1200&height=900&fov=0.4&projection=TAN&coordsys=icrs&rotation_angle=0.0&ra=48.837009&dec=41.611606&format=fits' +#urllib.request.urlretrieve(url, 'test.fits') + +# hips2fits.timeout = 180 + +# w = astropy_wcs.WCS(header={ +# 'NAXIS1': 4100, # Width of the output fits/image +# 'NAXIS2': 4100, # Height of the output fits/image +# 'WCSAXES': 2, # Number of coordinate axes +# 'CRPIX1': 2050, # Pixel coordinate of reference point +# 'CRPIX2': 2050, # Pixel coordinate of reference point +# 'CDELT1': -0.000108056, # [deg] Coordinate increment at reference point +# 'CDELT2': 0.000108056, # [deg] Coordinate increment at reference point +# 'CUNIT1': 'deg', # Units of coordinate increment and value +# 'CUNIT2': 'deg', # Units of coordinate increment and value +# 'CTYPE1': 'RA---TAN', # galactic longitude, Mollweide's projection +# 'CTYPE2': 'DEC--TAN', # galactic latitude, Mollweide's projection +# 'CRVAL1': 48.837009, # [deg] Coordinate value at reference point +# 'CRVAL2': 41.611606, # [deg] Coordinate value at reference point +# }) +# hips = 'CDS/P/PanSTARRS/DR1/g' +# result = hips2fits.query_with_wcs( +# hips=hips, +# wcs=w +# ) + +# result.writeto('test.fits', overwrite=True) + +def search_for_DECam(self, survey): + + from . 
import generate_FOV_grid, LCO_INSTRUMENTS, SURVEYS, OUT_FOLDER
+
+    connect = sia.SIAService(DECAM_SERVICE)
+    table = connect.search(pos = (self.coord.ra.deg, self.coord.dec.deg), size = (LCO_INSTRUMENTS['sinistro'].fov.to(u.deg).value, LCO_INSTRUMENTS['sinistro'].fov.to(u.deg).value), verbosity=2).to_table()
+
+    # DEBUG block (disabled): dump the SIA results table to table.csv and
+    # halt. Re-enable only when troubleshooting the service response,
+    # otherwise the download loop below would never be reached.
+    #with open('table.csv','w') as out:
+    #    for c in table.columns:
+    #        out.write(c+',')
+    #    out.write('\n')
+    #    for i in range(len(table[table.columns[0]])):
+    #        for c in table.columns:
+    #            out.write(str(table[c][i])+',')
+    #        out.write('\n')
+    #
+    #exit()
+
+    for flt in self.filters:
+
+        sel = (table['prodtype'] == 'image') & (startswith(table['obs_bandpass'].astype(str), flt))
+
+        if len(table[sel]) != 0:
+            out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits'
+            urllib.request.urlretrieve(table[sel]['access_url'][0], out_name)
+            self.templates_paths[f'{survey}_{flt}'] = out_name
+            with pf.open(out_name, mode='update') as hdu:
+                hdu[0].header['MJD-OBS'] = hdu[0].header['MJD_MEAN']
+                hdu[0].header['DATE-OBS'] = hdu[0].header['DATEOBS']
+                hdu[0].header['DAY-OBS'] = hdu[0].header['DATE-OBS'].split('T')[0]
+                hdu[0].header['FILTER'] = flt+'p' #note, this works only with griz
+                hdu[0].header['OBJECT'] = self.obj
+                hdu[0].header['WCSERR'] = 0
+                hdu[0].header['RA'] = self.coord.ra.deg
+                hdu[0].header['DEC'] = self.coord.dec.deg
+
+                hdu.flush()
+        else:
+            print(f'Coordinates {self.coord.ra.deg} {self.coord.dec.deg} not in DECam with filter {flt}.')
\ No newline at end of file
diff --git a/gw/survey_queries/PanSTARRS_search.py b/gw/survey_queries/PanSTARRS_search.py
new file mode 100644
index 00000000..9cc5a45c
--- /dev/null
+++ b/gw/survey_queries/PanSTARRS_search.py
@@ -0,0 +1,217 @@
+# -*- coding: utf-8 -*-
+"""PanSTARRS_search.py
+
+"""
+
+from astropy.table import Table
+from astropy import units as u
+from astropy.time import Time
+from astropy.wcs import WCS
+import astropy.io.fits as pf
+
+# PS1 urls
+PS1_TAB = 'https://ps1images.stsci.edu/cgi-bin/ps1filenames.py'
+
+# PS1 provides a cutout API. We probably prefer to use the whole
+# skycell, but we can easily change that with this bool variable
+SKYCELL = True
+if SKYCELL:
+    PS1_CUT = 'https://ps1images.stsci.edu/'
+else:
+    PS1_CUT = 'https://ps1images.stsci.edu/cgi-bin/fitscut.cgi'
+
+
+def search_for_PS1(query, survey):
+    '''
+    This function searches and downloads PS1 templates.
+    Together with the data, it will also fetch the weight images (i.e. 1/variance)
+    and the mask images.
+
+    '''
+
+    from . import generate_FOV_grid, LCO_INSTRUMENTS, SURVEYS
+
+    # Instruments are sorted by FOV size.
+    # :query.instruments[0]: is the instrument with the biggest FOV
+    # and therefore requires the biggest template. We start creating
+    # this, and instead of repeating the procedure, we just scale down
+    # this first template to match the smaller requirements.
+    inst = query.instruments[0]
+
+    # Generate grid of points covering the FOV
+    grid = generate_FOV_grid(_center = query.coord, _fov = LCO_INSTRUMENTS[inst].fov, _step=SURVEYS[survey].skycell_size)
+
+    # NOTE: wt are actually variance maps
+    data_lists = {'stack':{}, 'mask':{}, 'wt':{}}
+
+    while True:
+        # Initially :grid[0]: will be the center of the field.
+        # After each iteration, the :grid: array is trimmed down
+        # and its first element will be the point on the grid
+        # most distant from the center
+
+        # Creating the url to query the PS1 database.
+        # NOTE: PS1 needs the size in pixels, considering 0.25 arcsec/pixel.
+        # This first query returns a Table with a list of files
+        print(f'Querying the PS1 database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.')
+        tableurl = f'{PS1_TAB}?ra={grid[0].ra.deg}&dec={grid[0].dec.deg}&size={LCO_INSTRUMENTS[inst].fov.to(u.arcsec).value/0.25}&format=fits&filters={query.filters}&type=stack,stack.wt,stack.mask'
+        table = Table.read(tableurl, format='ascii')
+
+        # If the table is empty, the images are not in PS1
+        if len(table) == 0:
+            print(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the PS1 footprint.\n')
+            return
+
+        print('Coordinates in PS1 footprint.')
+        for tab in table:
+            flt = tab['filter']
+            filename = tab['filename']
+            filetype = tab['type'].split('.')[-1]
+
+            # Initializing dictionaries
+            # (one list per filter for each of 'stack', 'mask' and 'wt',
+            # created the first time that combination is seen)
+            if flt not in data_lists[filetype]:
+                data_lists[filetype][flt]=[]
+
+            if SKYCELL:
+                url = f'{PS1_CUT}{filename}'
+            else:
+                url = f'{PS1_CUT}?ra={grid[0].ra.deg}&dec={grid[0].dec.deg}&size=6000&format=fits&red={filename}'
+
+            print(f'Retrieving file {filename}')
+            with pf.open(url) as hdulist:
+                # The skycell headers need some adjustments
+                if SKYCELL:
+                    #hdulist[1].header['TIMESYS'] = 'TAI'
+                    hdulist[1].header['RADESYSa'] = 'FK5'
+                    hdulist[1].header['DATE-OBS'] = 'T'.join(Time(hdulist[1].header['MJD-OBS'], format='mjd', scale='utc').iso.split())
+                    hdulist[1].header['PC1_1'] = hdulist[1].header['PC001001']
+                    hdulist[1].header['PC1_2'] = hdulist[1].header['PC001002']
+                    hdulist[1].header['PC2_1'] = hdulist[1].header['PC002001']
+                    hdulist[1].header['PC2_2'] = hdulist[1].header['PC002002']
+                    del hdulist[1].header['PC001001']
+                    del hdulist[1].header['PC001002']
+                    del hdulist[1].header['PC002001']
+                    del hdulist[1].header['PC002002']
+
+                    # The algorithm for decompressing fits file data
+                    # doesn't work on PS1 skycells since astropy version 5.3
+                    # The easiest way to fix this is to remove the
+                    # 'ZBLANK' key (assuming it's equal to 'BLANK')
+                    if 'ZBLANK' in hdulist[1].header:
+                        del hdulist[1].header['ZBLANK']
+
+                    # The image flux scaling is also non-standard for the stack images
+                    if filetype != 'mask':
+                        hdulist[1].data = hdulist[1].header['BOFFSET'] + hdulist[1].header['BSOFTEN'] * (10**(0.4*hdulist[1].data) - 10**(-0.4*hdulist[1].data))
+                        del hdulist[1].header['BLANK']
+
+                    # Getting rid of the edge nans
+                    fixed_hdu = trim_nan(hdulist[1])
+                else:
+                    fixed_hdu = trim_nan(hdulist[0])
+
+                del fixed_hdu.header['RADESYS']
+
+                # Since the weight images are actually variance maps, we need
+                # to take the reciprocal of it
+                if filetype == 'wt':
+                    fixed_hdu.data = 1/fixed_hdu.data
+
+                data_lists[filetype][flt].append(fixed_hdu)
+
+        for skycell in data_lists['stack'][query.filters[0]]:
+            #trimming down the grid, leaving the points currently not in any skycell
+            grid = [p for p in grid if not p.contained_by(WCS(skycell.header))]
+
+        if len(grid) == 0:
+            break
+
+    return data_lists['stack'], data_lists['wt']
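
A minimal standalone sketch of the BOFFSET/BSOFTEN unscaling used above (the helper name is hypothetical; it assumes both keywords are present, as in the PS1 skycell headers handled here). The expression is mathematically equivalent to boffset + 2*bsoften*sinh(0.4*ln(10)*x):

import numpy as np

def ps1_linear_flux(data, boffset, bsoften):
    # PS1 stacks store an asinh-like, log-scaled quantity; recover the
    # linear flux with the header-provided offset and softening factors.
    data = np.asarray(data)
    return boffset + bsoften * (10**(0.4 * data) - 10**(-0.4 * data))
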
+
+
+def trim_nan(_hdu):
+    '''
+    When downloading an image from PS1, the image is padded with NaN.
+    This function trims the image, removing the NaN padding while
+    preserving the WCS.
+
+    NOTE: the fluxes of the full skycell stack images are scaled. Astropy
+    rescales the data automatically as soon as the data attribute of the
+    hdu is accessed. See https://docs.astropy.org/en/stable/io/fits/usage/image.html
+    for more details.
+
+    *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+    :param _hdu: HDUList containing the data to trim
+    :type _hdu: `~astropy.io.fits.HDUList`
+
+    *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+    :return: HDUList with edges trimmed
+    :rtype: `~astropy.io.fits.HDUList`
+
+    '''
+
+    from astropy.nddata import Cutout2D
+    import numpy as np
+    import math
+
+    # Getting all the columns with only np.nan in them
+    good_cols = np.all(np.isnan(_hdu.data),axis=1)
+
+    # Retrieving the first and the last index of good_cols == False,
+    # i.e. the first and last columns containing (at least partially)
+    # real data
+    for i in range(len(good_cols)):
+        if not good_cols[i]:
+            # Adding 10 pixel padding to account for edge artifact
+            first_col = i+10
+            break
+
+    for i in range(len(good_cols)-1,-1,-1):
+        if not good_cols[i]:
+            # We want to include the index, so we add 1, putting the
+            # edge right after the column. Subtracting 10 pixel padding
+            # to account for edge artifact.
+            last_col = i+1-10
+            break
+
+
+    # Doing the same with the rows
+    good_rows = np.all(np.isnan(_hdu.data),axis=0)
+    for i in range(len(good_rows)):
+        if not good_rows[i]:
+            first_row = i+10
+            break
+
+    for i in range(len(good_rows)-1,-1,-1):
+        if not good_rows[i]:
+            last_row = i+1-10
+            break
+
+    size_x = last_col-first_col
+    size_y = last_row-first_row
+    cen_x = math.floor((last_col+first_col)/2.)
+    cen_y = math.floor((last_row+first_row)/2.)
+
+    # No idea why x and y are inverted between the center and the size
+    # ¯\_(ツ)_/¯
+    cut = Cutout2D(_hdu.data, (cen_y,cen_x), (size_x,size_y), wcs=WCS(_hdu.header))
+
+    out_hdu = pf.PrimaryHDU()
+    out_hdu.data = cut.data
+    out_hdu.header = _hdu.header.copy()
+    out_hdu.header.update(cut.wcs.to_header())
+
+    return out_hdu
+
+def track_skycells_in_header(_header, _data):
+    # Keeping track of the single skycells that are being combined.
+    _header['NSKYCELL'] = (len(_data), 'Number of PS1 skycells merged')
+    for i,im in enumerate(_data, start=1):
+        _header[f'STK_TY{i:>02}'] = (im.header['STK_TYPE'], f'type of stack {i}')
+        _header[f'STK_ID{i:>02}'] = (im.header['STK_ID'], f'ID of stack {i}')
+        _header[f'SKYCEL{i:>02}'] = (im.header['SKYCELL'], f'skycell of stack {i}')
+        _header[f'TES_ID{i:>02}'] = (im.header['TESS_ID'], f'tessellation ID of stack {i}')
+
+    return _header
\ No newline at end of file
diff --git a/gw/survey_queries/SDSS_search.py b/gw/survey_queries/SDSS_search.py
new file mode 100644
index 00000000..6e845b4f
--- /dev/null
+++ b/gw/survey_queries/SDSS_search.py
@@ -0,0 +1,179 @@
+# -*- coding: utf-8 -*-
+"""SDSS_search.py
+
+"""
+
+from astroquery.sdss import SDSS
+from astropy import units as u
+from astropy.time import Time
+from astropy.wcs import WCS
+import astropy.io.fits as pf
+import numpy as np
+
+def search_for_SDSS(self, survey):
+    '''
+    This function searches and downloads SDSS templates.
+    Together with the data, it will also fetch the variance images
+
+    '''
+
+    # Instruments are sorted by FOV size.
+    # :self.instruments[0]: is the instrument with the biggest FOV
+    # and therefore requires the biggest template. We start creating
+    # this, and instead of repeating the procedure, we just scale down
+    # this first template to match the smaller requirements.
+
+    from . 
import generate_FOV_grid, LCO_INSTRUMENTS, SURVEYS + + inst = self.instruments[0] + + # Generate grid of points covering the FOV + grid = generate_FOV_grid(_center = self.coord, _fov = LCO_INSTRUMENTS[inst].fov, _step=SURVEYS[survey].skycell_size) + + all_pointings = [] + + img_data_list = {} + weight_data_list = {} + + for f,flt in enumerate(self.filters): + + img_data_list[flt] = [] + weight_data_list[flt] = [] + + # For the first filter, we need to collect the references for all the required SDSS fields + # We don't need to repeat the query for the following filters + # The SDSS fields references are stored in :all_pointings: + if f == 0: + while True: + # SDSS queries radius must be less than 3.0 arcmin. + # Initially :grid[0]: will be the center of the field. + # After each iteration, the :grid: array is trimmed down and its first element will be + # the point on the grid most distant from the center + print(f'Querying the SDSS database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.') + xid = SDSS.query_region(grid[0], radius=3*u.arcmin, spectro=False) + if xid is None: + print(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the SDSS footprint.\n') + return + + print('Coordinates in SDSS footprint.') + new_pointings = [list(el) for el in np.unique(xid['run','camcol','field']) if list(el) not in all_pointings] + + all_pointings = all_pointings + new_pointings + + for run, camcol, field in new_pointings: + #fetching the fits as an astropy hdu + print(f'Retrieving file with run={run}, camcol={camcol}, field={field} and filter {flt}.') + im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0] + + sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field) + img_data_list[flt].append(sky_subtracted_original_image_hdu) + weight_data_list[flt].append(weight_image_hdu) + + + for tile in img_data_list[flt]: + grid = [p for p in grid if not p.contained_by(WCS(tile.header))] + + if len(grid) == 0: + break + + else: + #Note that we changed the list to all_pointings now that we have them all + for run, camcol, field in all_pointings: + #fetching the fits as an astropy hdu + im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0] + + sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field) + img_data_list[flt].append(sky_subtracted_original_image_hdu) + weight_data_list[flt].append(weight_image_hdu) + + return img_data_list, weight_data_list + + + +def elaborate_SDSS(_im, _flt, _run, _camcol, _field): + ''' + SDSS images require a little bit of manipulation. + This function will produce a ready to be used image + as well as its corresponding weight image. 
+
+    *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+    :param _im: SDSS image HDUList
+    :type _im: `~astropy.io.fits.HDUList`
+
+    :param _flt: SDSS filter
+    :type _flt: string
+
+    :param _run: Run number
+    :type _run: int
+
+    :param _camcol: Column in the imaging camera
+    :type _camcol: int
+
+    :param _field: Field number
+    :type _field: int
+
+    *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+    :return: HDU of the image, HDU of the weight image
+    :rtype: list
+
+    '''
+
+    from scipy import interpolate
+
+    header = _im[0].header
+
+    #Although we do not write out each single tile, these edits to the header
+    #will make it easier to keep track of each tile in the merged fits file
+    header['RADESYSa'] = header['RADECSYS']
+    del header['RADECSYS']
+    header['MJD-OBS'] = Time(f"{header['DATE-OBS']} {header['TAIHMS']}", format='iso', scale='tai').mjd
+    header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split())
+    #For some reason the field info is not stored in the header. There is a "FRAME" key, but it's different
+    header['FIELD'] = _field
+
+    #retrieve gain and darkVariance from another online table
+    q = SDSS.query_sql(f'SELECT DISTINCT gain_{_flt}, darkVariance_{_flt} FROM field WHERE run={_run} AND camcol={_camcol}')
+    gain = q[f'gain_{_flt}'][0]
+    darkVariance = q[f'darkVariance_{_flt}'][0]
+
+    #im[2] contains the sky
+    sky, x, y = _im[2].data[0]
+    x_old = np.arange(np.shape(sky)[0])
+    y_old = np.arange(np.shape(sky)[1])
+
+    # `interp2d` is deprecated in SciPy 1.10 and will be removed in SciPy 1.14.0,
+    # so we are using `interpolate.RegularGridInterpolator` instead
+    # NOTE: `interp2d` has x and y inverted but doesn't need a `meshgrid` to produce the output
+    # This is the code if `interp2d` is still preferred despite its deprecation
+    # tck = interpolate.interp2d(y_old, x_old, sky, kind=method)
+    # sky_image = tck(x, y)
+
+    tck = interpolate.RegularGridInterpolator((x_old,y_old), sky, method="linear", bounds_error=False, fill_value = None)
+
+    # need to invert x and y to produce the meshgrid with the right shape
+    x_grid, y_grid = np.meshgrid(y, x, indexing='ij')
+
+    sky_image = tck((x_grid, y_grid)) #in ADU
+
+    #im[1] contains a row with factors nanomaggies per counts for each column
+    calib_image = np.asarray([_im[1].data.tolist()]*np.shape(_im[0].data)[0])
+
+    sky_subtracted_original_image = _im[0].data / calib_image #in ADU
+
+    variance_image = ((_im[0].data / calib_image + sky_image) / gain + darkVariance)# in ADU * calib_image **2
+
+    sky_subtracted_original_image_hdu = pf.PrimaryHDU(sky_subtracted_original_image, header=header)
+    #to reproject and combine, we just need the weight, not the variance
+    weight_image_hdu = pf.PrimaryHDU(1/variance_image, header=header)
+
+    return sky_subtracted_original_image_hdu, weight_image_hdu
+
+def track_skycells_in_header(_header, _data):
+    _header['NTILES'] = (len(_data), 'Number of SDSS tiles merged')
+    for i,im in enumerate(_data, start=1):
+        _header[f'RUN{i:>03}'] = (im.header['RUN'], f'Run number of tile {i}')
+        _header[f'CAMCO{i:>03}'] = (im.header['CAMCOL'], f'Column in the imaging camera of tile {i}')
+        _header[f'FIELD{i:>03}'] = (im.header['FIELD'], f'Field number of tile {i}')
+        _header[f'MJD{i:>03}'] = (im.header['MJD-OBS'], f'MJD of tile {i}')
+
+    return _header
\ No newline at end of file
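
The variance model used by elaborate_SDSS() above combines the Poisson term of the sky-added counts with the dark variance. A minimal standalone sketch of the same computation (hypothetical helper name; it assumes the conventions above: frame data in nanomaggies, a per-pixel calibration image in nanomaggies per count, and a sky image in ADU):

import numpy as np

def sdss_weight_map(frame_data, calib_image, sky_image, gain, dark_variance):
    # Convert back to ADU, add back the subtracted sky for the Poisson term,
    # then add the dark variance; the weight is the inverse variance.
    counts = frame_data / calib_image
    variance = (counts + sky_image) / gain + dark_variance
    return 1.0 / variance
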
+"""Skymapper_search.py + +""" + +from astropy.io.votable import parse +import io + +# Skymapper urls +SKY_CUT = 'https://api.skymapper.nci.org.au/public/siap/dr4/query' + +@set_survey('Skymapper') +def search_for_Skymapper(self, survey): + ''' + Skymapper has the ability to search for multiple filters in one go, + but the returned list of file loses the information on the filter, + so I need to send a separate query for each filter. + Biggest size for Skymapper is 0.17 deg + + ''' + from astropy import units as u + for flt in self.filters: + + url = f'{SKY_CUT}?POS={self.coord.ra.deg},{self.coord.dec.deg}&SIZE=0.17,0.17&BAND={flt}&FORMAT=image/fits&INTERSECT=covers&RESPONSEFORMAT=VOTABLE' + + + u = urllib.request.urlopen(url) + s = io.BytesIO(u.read()) + votable = parse(s) + + for resource in votable.resources: + for t in resource.tables: + table = t.array['get_fits'][:] + + if len(table) != 0: + out_name = f'{OUT_FOLDER}/{survey}/{self.obj}_{flt}.fits' + urllib.request.urlretrieve(table[0], out_name) + self.templates_paths[f'{survey}_{flt}'] = out_name + with pf.open(out_name, mode='update') as hdu: + hdu[0].header['DAY-OBS'] = hdu[0].header['DATE-OBS'].split('T')[0] + hdu[0].header['FILTER'] = flt+'p' #note, this works only with griz + hdu[0].header['OBJECT'] = self.obj + hdu[0].header['WCSERR'] = 0 + hdu[0].header['RA'] = self.coord.ra.deg + hdu[0].header['DEC'] = self.coord.dec.deg + + hdu.flush() + + else: + print(f'Coordinates {self.coord.ra.deg} {self.coord.dec.deg} not in Skymapper with filter {flt}.') \ No newline at end of file diff --git a/gw/survey_queries/__init__.py b/gw/survey_queries/__init__.py new file mode 100644 index 00000000..55d44c3c --- /dev/null +++ b/gw/survey_queries/__init__.py @@ -0,0 +1,144 @@ +from astropy import units as u +import os +import numpy as np + +from . import PanSTARRS_search +from . import SDSS_search + +# Folder definitions +OUT_FOLDER = os.getenv('GWTEMPLATES_FOLDER') +#OUT_FOLDER = '/supernova/data/extdata/GW_templates/O4' + +class Instrument: + def __init__(self, _fov, _resolution): + self.fov = _fov + self.resolution = _resolution + +class Survey: + def __init__(self, _telescope, _instrument, _skycell_size, _track_skycells_in_header): + self.telescope = _telescope + self.instrument = _instrument + self.skycell_size = _skycell_size + self.track_skycells_in_header = _track_skycells_in_header + +# Specifications for different instruments +# The FOV here is thought as the diameter of a circle that would circumscribe the LCO image. +# This is effectively bigger than the actual FOV of each instrument, but it will guarantee +# that the full FOV is covered given any rotation +LCO_INSTRUMENTS = { + 'sinistro': Instrument(_fov = 26.5*u.arcmin, _resolution = 0.389*u.arcsec), + 'muscat' : Instrument(_fov = 9.1*u.arcmin, _resolution = 0.27*u.arcsec), + # The FOV of 'qhy' is rectangular, so we calculate the side of a square having the same diagonal. + 'qhy' : Instrument(_fov = np.sqrt((1.9**2 + 1.2**2)/2)*u.deg, _resolution = 0.74*u.arcsec) +} + +SURVEYS ={ + 'PS1': Survey(_telescope = 'PS1', _instrument = 'GPC1', _skycell_size = 0.4*u.deg, _track_skycells_in_header = PanSTARRS_search.track_skycells_in_header), + 'SDSS': Survey(_telescope = '2.5m', _instrument = 'SDSS', _skycell_size = 10*u.arcmin, _track_skycells_in_header = SDSS_search.track_skycells_in_header) +} + +def generate_FOV_grid(_center, _fov, _step, circle=True, wcs = None): + ''' + The function generates a grid of points, each distant :_step: to each other. 
+    The construction follows a recursive hexagonal ring that becomes bigger and
+    bigger until it encompasses the whole FOV. It takes advantage of the class
+    `~astropy.visualization.wcsaxes.SphericalCircle`, which creates a circle that
+    is formed of all the points that are within a certain angle of the central
+    coordinates on a sphere. Each hexagonal ring is built on top of these circles.
+
+    If :_step: is bigger than half of the FOV size, the grid would only have
+    the central point. This can cause the FOV to not be fully covered by the
+    template tiles. To fix this, in this case the grid will be just the central
+    point plus an octagon with a diagonal equal to half of the diagonal of the
+    FOV. The octagon is chosen to account for any rotation of the FOV.
+
+    If :circle: is True (default), the grid will be trimmed to a circle with
+    radius equal to :_fov:
+
+    If :wcs: is provided, the points outside of the provided WCS are removed.
+
+    *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+    :param _center: Coordinates of the center point of the grid
+    :type _center: `~astropy.units.Quantity` ['angle']
+
+    :param _fov: size of the FOV to tile with the grid. This assumes a square
+                 FOV. This quantity effectively translates to the length of the
+                 apothem of the outermost hexagon, which will be equal to half of
+                 the diagonal of the square FOV.
+    :type _fov: `~astropy.units.Quantity` ['angle']
+
+    :param _step: distance between neighbouring points in the grid. If a value
+                  is not provided, it will default to half :_fov:
+    :type _step: `~astropy.units.Quantity` ['angle']
+
+    :param circle: If True, trims the grid to a circle with radius :_fov:
+    :type circle: bool
+    :default: True
+
+    :param wcs: WCS information. If provided, the points outside of it will
+                be removed
+    :type wcs: `~astropy.wcs.WCS` or None
+    :default wcs: None
+
+    *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+    :return: List of points of the grid. Each element is an astropy SkyCoord
+    :rtype: list
+
+    '''
+
+    from astropy.visualization.wcsaxes import SphericalCircle
+    from astropy.coordinates import SkyCoord
+    import math
+
+    points = [[_center, 0*u.deg]]
+
+    # If the step is too big, we will take an octagonal ring
+    if _step > _fov/2:
+        vertexes = SphericalCircle(_center, radius = _fov*np.sqrt(2)/2, resolution=8).get_verts()
+        # :SphericalCircle: will produce a Polygon, so the first and the last
+        # point will coincide. Therefore we need to remove the last vertex.
+        for v in vertexes[:-1]:
+            p = SkyCoord(*v, unit=u.deg)
+            points.append([p, p.separation(_center)])
+
+    else:
+        # A square FOV of side A has a diagonal of A*sqrt(2). Therefore a
+        # hexagon will need a diagonal of A*sqrt(2)/sqrt(3) in order to contain
+        # it all, given any rotation. We add an extra order just to be sure
+
+        for ring in range(1, math.ceil(_fov/_step*np.sqrt(2)/np.sqrt(3))+1):
+            current_hexagon = []
+            vertexes = SphericalCircle(_center, radius = _step*ring, resolution=6).get_verts()
+            for v in vertexes:
+                p = SkyCoord(*v, unit=(u.deg,u.deg))
+                current_hexagon.append([p, p.separation(_center)])
+            #Filling the sides of the hexagon with points
+            for side in range(6):
+                # We need to split the side of the nth hexagon
+                # in n segments
+                for i in range(1, ring):
+                    a = i / ring # rescale 0 < i < n --> 0 < a < 1
+                    new_point = (1 - a) * current_hexagon[side][0].cartesian + a * current_hexagon[side+1][0].cartesian # interpolate cartesian coordinates
+                    new_point = SkyCoord(new_point)
+                    current_hexagon.append([new_point, new_point.separation(_center)])
+            # :SphericalCircle: will produce a Polygon, so the first and the last
+            # point will coincide. Therefore we need to remove the last vertex of
+            # the current hexagon.
+            current_hexagon.pop(6)
+            points = points + current_hexagon
+
+    if circle:
+        #adding 5 arcsec to give an extra buffer
+        points = [p for p in points if p[1] <= (_fov/2*np.sqrt(2))+5*u.arcsec]
+
+    if wcs:
+        points = [p for p in points if p[0].contained_by(wcs)]
+
+    # Sorting the list according to the distance, with the most distant points first
+    points.sort(key = lambda x: x[1], reverse=True)
+
+    #Putting the center back at the beginning
+    points = [points.pop()] + points
+
+    #Returning just the list of coordinates, without the distance
+    return [p[0] for p in points]
\ No newline at end of file
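
A quick sketch of how the helper above is meant to be driven (the coordinates are hypothetical example values; the 0.4 deg step matches the PS1 skycell size defined in SURVEYS):

from astropy.coordinates import SkyCoord
from astropy import units as u
from survey_queries import generate_FOV_grid, LCO_INSTRUMENTS

center = SkyCoord(197.44875, -23.38389, unit=(u.deg, u.deg))
grid = generate_FOV_grid(_center=center,
                         _fov=LCO_INSTRUMENTS['sinistro'].fov,
                         _step=0.4*u.deg)
# grid[0] is the center itself; the remaining points are sorted with the
# most distant ones first, which is what drives the survey query loops above
print(len(grid), grid[0].separation(center))
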
diff --git a/gw/survey_queries/query.py b/gw/survey_queries/query.py
new file mode 100644
index 00000000..3bf17227
--- /dev/null
+++ b/gw/survey_queries/query.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+"""survey_queries.py
+
+"""
+
+from astropy.time import Time
+from astropy.wcs import WCS
+import astropy.io.fits as pf
+import numpy as np
+
+from . import PanSTARRS_search, SDSS_search, DECam_search, generate_FOV_grid, LCO_INSTRUMENTS, SURVEYS, OUT_FOLDER
+
+class template_query:
+    '''
+    This class will query the surveys looking for the optical templates.
+    Each function is associated with a survey. First it will check if
+    the coordinates are in the footprint of that survey. Each function
+    will return True or False accordingly. In addition, if the coordinates
+    are indeed in the footprint, then the function will store the hdu of
+    the template in the 'hdu' attribute.
+
+    '''
+
+    def __init__(self, _obj, _coord, _filters, _inst):
+        '''
+        Initializing the class with the name of the object :_obj:,
+        the coordinates :_coord: and the filters :_filters: to search.
+        The template images will be resampled to match the resolution
+        of the instrument (or list of instruments) provided by :_inst:.
+        The template FOV will also be as big as to include the whole
+        FOV of that instrument.
+        The empty dictionary :templates_paths: is also created.
+
+        *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+        :_obj: Name of the object, mainly for file naming purposes
+        :type _obj: string
+
+        :_coord: coordinates of the object, corresponding to the center
+                 of the search
+        :type _coord: astropy.coordinates.sky_coordinate.SkyCoord
+
+        :_filters: filters to search. If searching for multiple filters
+                   use a single long string, e.g. 'gri'
+        :type _filters: string
+
+        :_inst: instrument name, or list of instrument names. In order
+                to be valid, the name will be checked if it is in the
+                dictionary :LCO_INSTRUMENTS:, defined above.
:type _inst: string or list
+
+        '''
+        self.obj = _obj
+        self.coord = _coord
+        self.filters = _filters
+
+        # Making the :instruments: list and checking the instruments exist.
+        if isinstance(_inst, str):
+            if _inst == 'all':
+                self.instruments = list(LCO_INSTRUMENTS.keys())
+            else:
+                self.instruments = [_inst]
+        else:
+            self.instruments = _inst
+        for i in self.instruments:
+            if i not in LCO_INSTRUMENTS.keys():
+                raise ValueError(f'Instrument `{i}` does not appear to be currently supported. If this is a new instrument, you will need to include it in `LCO_INSTRUMENTS`.')
+
+        # Sorting the instruments in decreasing FOV size.
+        self.instruments.sort(key = lambda x: LCO_INSTRUMENTS[x].fov, reverse=True)
+
+        self.templates_paths = {}
+
+
+    # Defining a decorator. Because decorators are cool
+    def set_survey(_survey):
+        '''
+        This decorator sets the name of the survey for the following function, through the
+        :_survey: variable, and initializes the :self.templates_paths: dictionary with
+        a '--' string for that specific survey. The idea is that by initializing the
+        dictionary, there will be a record that the search has been performed.
+        Otherwise the dictionary will not have any entry for that survey.
+
+        *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+        :_survey: Name of the survey
+        :type _survey: string
+
+        *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+        :return: Decorated function
+        :rtype: function
+
+        '''
+        def decorate(function):
+            def initialize_survey(self):
+                # Initializing all the filters
+                for flt in self.filters:
+                    for inst in self.instruments:
+                        self.templates_paths[f'{inst}_{_survey}_{flt}'] = '--'
+                print(f'Searching for {_survey} templates.\n')
+                return function(self, _survey)
+            return initialize_survey
+        return decorate
+
+    @set_survey('PS1')
+    def search_for_PS1(self, survey):
+        data, weights = PanSTARRS_search.search_for_PS1(self, survey)
+        self.make_templates(_data = data, _weights = weights, _survey = survey)
+        print('This is done')
+
+    @set_survey('SDSS')
+    def search_for_SDSS(self, survey):
+        data, weights = SDSS_search.search_for_SDSS(self, survey)
+        self.make_templates(_data = data, _weights = weights, _survey = survey)
+
+    @set_survey('DECam')
+    def search_for_DECam(self, survey):
+        data, weights = DECam_search.search_for_DECam(self, survey)
+        self.make_templates(_data = data, _weights = weights, _survey = survey)
+
+
+    def make_templates(self, _data, _weights, _survey):
+
+        from reproject.mosaicking import find_optimal_celestial_wcs
+        from reproject import reproject_interp
+        from reproject.mosaicking import reproject_and_coadd
+
+        # We now trim the list of skycells according to each instrument FOV
+        # For the first instrument this step should be redundant since we
+        # selected the skycell in order to tile the FOV already.
+        for inst in self.instruments:
+            new_grid = generate_FOV_grid(_center = self.coord, _fov = LCO_INSTRUMENTS[inst].fov, _step=SURVEYS[_survey].skycell_size)
+            for flt in _data:
+                # Since enumerate() returns a generator and generators can't be reversed,
+                # we need to convert it to a list first
+                for skycell_idx, skycell in reversed(list(enumerate(_data[flt]))):
+                    contained = False
+                    for p in new_grid:
+                        if p.contained_by(WCS(skycell.header)):
+                            contained = True
+                            break
+                    # If no point of the grid is contained by the skycell,
+                    # drop it from all the data lists
+                    if not contained:
+                        _data[flt].pop(skycell_idx)
+                        _weights[flt].pop(skycell_idx)
+
+                print(f'Reprojecting and combining skycells of filter {flt} for {inst}.')
+
+                wcs_out, shape_out = find_optimal_celestial_wcs(_data[flt], resolution=LCO_INSTRUMENTS[inst].resolution)
+                array, footprint = reproject_and_coadd(_data[flt], wcs_out, shape_out=shape_out, reproject_function=reproject_interp, input_weights = _weights[flt], combine_function='mean')
+                header = wcs_out.to_header()
+
+                header = SURVEYS[_survey].track_skycells_in_header(_header = header, _data = _data[flt])
+
+                # Populating the header with the keys needed by the pipeline
+                header['RA'] = self.coord.ra.deg
+                header['DEC'] = self.coord.dec.deg
+                header['CAT-RA'] = self.coord.ra.deg
+                header['CAT-DEC'] = self.coord.dec.deg
+                header['MJD-OBS'] = (np.mean([im.header['MJD-OBS'] for im in _data[flt]]), 'Average MJD')
+                header['DATE-OBS'] = 'T'.join(Time(header['MJD-OBS'], format='mjd', scale='utc').iso.split())
+                header['DAY-OBS'] = header['DATE-OBS'].split('T')[0].replace('-', '')
+                header['TELESCOP'] = SURVEYS[_survey].telescope
+                header['INSTRUME'] = SURVEYS[_survey].instrument
+                header['OBJECT'] = self.obj
+                header['FILTER'] = flt+'p' # NOTE: this works only with griz
+                header['AIRMASS'] = 1
+                header['WCSERR'] = 0
+                header['PIXSCALE'] = (LCO_INSTRUMENTS[inst].resolution.value, 'arcsec/pixel')
+                header['BUNIT'] = ('counts', 'ADUs')
+
+                # Writing out the final combined template
+                out_name = f'{OUT_FOLDER}/{_survey}/{inst}_{self.obj}_{flt}'
+                self.templates_paths[f'{inst}_{_survey}_{flt}'] = out_name+'.fits'
+                pf.writeto(out_name+'.fits', array, header, overwrite=True)
+
+                # Hotpants needs the variance, so we need to write out 1/footprint
+                header['BUNIT'] = ('counts2', 'ADUs^2')
+                pf.writeto(out_name+'.var.fits', 1/footprint, header, overwrite=True)
+
+
From 5b84c024708535e1325c92f57e8810bbe050702b Mon Sep 17 00:00:00 2001
From: Giacomo Terreran
Date: Tue, 21 May 2024 23:15:38 -0700
Subject: [PATCH 12/21] Edited the views and the hooks to update all o4_galaxy
 targets at once. Created run_template_search.py to wrap the template_query
 class and allow passing a list of targets. Changed the input of
 template_query to store the info needed in the o4_galaxy table. Adopted the
 logging of SNEx2 for the queries.
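
A short recap of the mosaicking core introduced in make_templates() above. The sketch below isolates the same reproject calls used in the patch; `tiles` and `weights` are assumed to be matching lists of astropy HDUs (images and inverse-variance maps), and the helper name is hypothetical:

import astropy.units as u
from reproject import reproject_interp
from reproject.mosaicking import find_optimal_celestial_wcs, reproject_and_coadd

def combine_tiles(tiles, weights, resolution=0.389*u.arcsec):
    # Build one output WCS big enough for all input tiles at the target scale
    wcs_out, shape_out = find_optimal_celestial_wcs(tiles, resolution=resolution)
    # Mean-combine the tiles, weighting each pixel by its inverse variance
    array, footprint = reproject_and_coadd(tiles, wcs_out, shape_out=shape_out,
                                           reproject_function=reproject_interp,
                                           input_weights=weights,
                                           combine_function='mean')
    return array, footprint, wcs_out.to_header()
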
--- gw/hooks.py | 54 +++++++++++++-------------- gw/run_template_search.py | 32 ++++++++++++++++ gw/survey_queries/PanSTARRS_search.py | 11 ++++-- gw/survey_queries/SDSS_search.py | 11 ++++-- gw/survey_queries/query.py | 42 ++++++++++++++++----- gw/views.py | 18 ++++++--- 6 files changed, 116 insertions(+), 52 deletions(-) create mode 100644 gw/run_template_search.py diff --git a/gw/hooks.py b/gw/hooks.py index 8d322910..f3bc0c52 100644 --- a/gw/hooks.py +++ b/gw/hooks.py @@ -8,10 +8,7 @@ from tom_observations.models import ObservationRecord import logging from django.conf import settings -import survey_queries -from astropy.coordinates import SkyCoord -from astropy import units as u -from survey_queries.query import template_query +from run_template_search import run_template_search logger = logging.getLogger(__name__) @@ -75,7 +72,7 @@ def cancel_gw_obs(galaxy_ids=[], sequence_id=None, wrapped_session=None): logger.info('Finished canceling GW follow-up observations for sequence {}'.format(sequence_id)) -def ingest_gw_galaxy_into_snex1(target_id, event_id, wrapped_session=None): +def ingest_gw_galaxy_into_snex1(target_list, wrapped_session=None): if wrapped_session: db_session = wrapped_session @@ -85,36 +82,35 @@ def ingest_gw_galaxy_into_snex1(target_id, event_id, wrapped_session=None): o4_galaxies = _load_table('o4_galaxies', db_address=settings.SNEX1_DB_URL) - existing_target = db_session.query(o4_galaxies).filter(o4_galaxies.targetid==target_id) - if existing_target.count() > 0: - if any([t.event_id == event_id for t in existing_target]): - logger.info('Already ingested target {} into o4_galaxies table for event {}'.format(target_id, event_id)) + templates_to_download = [] - else: - logger.info('Found existing target {} in o4_galaxies table, copying it'.format(target_id)) - existing_target_row = existing_target.first() - existing_table = existing_target_row.__table__ #Somehow this is different than the o4_galaxies object - - non_pk_columns = [k for k in existing_table.columns.keys() if k not in existing_table.primary_key.columns.keys()] - data = {c: getattr(existing_target_row, c) for c in non_pk_columns} - data['event_id'] = event_id + for target_id, event_id in target_list: - db_session.add(o4_galaxies(**data)) + existing_target = db_session.query(o4_galaxies).filter(o4_galaxies.targetid==target_id) + if existing_target.count() > 0: + if any([t.event_id == event_id for t in existing_target]): + logger.info('Already ingested target {} into o4_galaxies table for event {}'.format(target_id, event_id)) - else: - snex2_target = Target.objects.get(id=target_id) - ra0 = snex2_target.ra - dec0 = snex2_target.dec - objname = snex2_target.objname + else: + logger.info('Found existing target {} in o4_galaxies table, copying it'.format(target_id)) + existing_target_row = existing_target.first() + existing_table = existing_target_row.__table__ #Somehow this is different than the o4_galaxies object + + non_pk_columns = [k for k in existing_table.columns.keys() if k not in existing_table.primary_key.columns.keys()] + data = {c: getattr(existing_target_row, c) for c in non_pk_columns} + data['event_id'] = event_id - #I'm not sure this is a Skycoord object. 
If it is, remove the following line
-        target_coord = SkyCoord(ra0,dec0, unit=(u.deg, u.deg))
+                db_session.add(o4_galaxies(**data))
 
-        templ = template_query(objname,target_coord , 'g', ['sinistro','muscat'])
-        templ.search_for_PS1()
-        templ.search_for_SDSS()
+        else:
+            t = Target.objects.get(id=target_id)
+            templates_to_download.append([target_id, event_id, t.objname, t.ra, t.dec])
+
+    # This call will make everything stop until all templates are downloaded.
+    templates_paths = run_template_search(templates_to_download, _filters = 'griz', _surveys = ['PS1','SDSS'], _instruments = ['sinistro','muscat'])
 
-        db_session.add(o4_galaxies(targetid=target_id, event_id=event_id, ra0=ra0, dec0=dec0, **templ.templates_paths))
+    for id in templates_paths.keys():
+        db_session.add(o4_galaxies(targetid=templates_paths[id].target_id, event_id=templates_paths[id].event_id, ra0=templates_paths[id].ra, dec0=templates_paths[id].dec, **templates_paths[id].templates_paths))
 
     if not wrapped_session:
         try:
diff --git a/gw/run_template_search.py b/gw/run_template_search.py
new file mode 100644
index 00000000..e5e2ac4a
--- /dev/null
+++ b/gw/run_template_search.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+"""run_template_search.py
+
+Script to search and (if they exist) download template
+images from available optical surveys. The pixel scale and the size
+of the resulting template will match the characteristics of the LCO
+specified.
+
+The script is based on the :survey_queries: package. This is just
+a wrapper to basically run the :query: class for a list of objects.
+
+"""
+
+from survey_queries.query import template_query
+
+
+def run_template_search(_targets_list, _filters, _surveys, _instruments):
+
+
+    out_dict ={}
+    for target_id, event_id, name, ra, dec in _targets_list:
+        s = template_query(target_id, event_id, name, ra, dec, _filters, _instruments)
+        if 'PS1' in _surveys:
+            s.search_for_PS1()
+        if 'SDSS' in _surveys:
+            s.search_for_SDSS()
+
+        out_dict[target_id] = s
+
+    return out_dict
+
+
diff --git a/gw/survey_queries/PanSTARRS_search.py b/gw/survey_queries/PanSTARRS_search.py
index 9cc5a45c..5b21416f 100644
--- a/gw/survey_queries/PanSTARRS_search.py
+++ b/gw/survey_queries/PanSTARRS_search.py
@@ -8,6 +8,9 @@
 from astropy.time import Time
 from astropy.wcs import WCS
 import astropy.io.fits as pf
+import logging
+
+logger = logging.getLogger(__name__)
 
 # PS1 urls
 PS1_TAB = 'https://ps1images.stsci.edu/cgi-bin/ps1filenames.py'
@@ -53,16 +56,16 @@ def search_for_PS1(query, survey):
         # Creating the url to query the PS1 database.
         # NOTE: PS1 needs the size in pixels, considering 0.25 arcsec/pixel.
# This first query returns a Table with a list of files - print(f'Querying the PS1 database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.') + logger.info(f'Querying the PS1 database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.') tableurl = f'{PS1_TAB}?ra={grid[0].ra.deg}&dec={grid[0].dec.deg}&size={LCO_INSTRUMENTS[inst].fov.to(u.arcsec).value/0.25}&format=fits&filters={query.filters}&type=stack,stack.wt,stack.mask' table = Table.read(tableurl, format='ascii') # If the table is empty, the images are not in PS1 if len(table) == 0: - print(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the PS1 footprint.\n') + logger.info(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the PS1 footprint.\n') return - print('Coordinates in PS1 footprint.') + logger.info('Coordinates in PS1 footprint.') for tab in table: flt = tab['filter'] filename = tab['filename'] @@ -79,7 +82,7 @@ def search_for_PS1(query, survey): else: url = f'{PS1_CUT}?ra={grid[0].ra.deg}&dec={grid[0].dec.deg}&size=6000&format=fits&red={filename}' - print(f'Retrieving file {filename}') + logger.info(f'Retrieving file {filename}') with pf.open(url) as hdulist: # The skycell headers need some adjustments if SKYCELL: diff --git a/gw/survey_queries/SDSS_search.py b/gw/survey_queries/SDSS_search.py index 6e845b4f..6605336d 100644 --- a/gw/survey_queries/SDSS_search.py +++ b/gw/survey_queries/SDSS_search.py @@ -9,6 +9,9 @@ from astropy.wcs import WCS import astropy.io.fits as pf import numpy as np +import logging + +logger = logging.getLogger(__name__) def search_for_SDSS(self, survey): ''' @@ -49,20 +52,20 @@ def search_for_SDSS(self, survey): # Initially :grid[0]: will be the center of the field. # After each iteration, the :grid: array is trimmed down and its first element will be # the point on the grid most distant from the center - print(f'Querying the SDSS database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.') + logger.info(f'Querying the SDSS database for coordinates {grid[0].ra.deg} {grid[0].dec.deg}.') xid = SDSS.query_region(grid[0], radius=3*u.arcmin, spectro=False) if xid is None: - print(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the SDSS footprint.\n') + logger.info(f'\nThe provided coordinates {grid[0].ra.deg},{grid[0].dec.deg} do not appear to be in the SDSS footprint.\n') return - print('Coordinates in SDSS footprint.') + logger.info('Coordinates in SDSS footprint.') new_pointings = [list(el) for el in np.unique(xid['run','camcol','field']) if list(el) not in all_pointings] all_pointings = all_pointings + new_pointings for run, camcol, field in new_pointings: #fetching the fits as an astropy hdu - print(f'Retrieving file with run={run}, camcol={camcol}, field={field} and filter {flt}.') + logger.info(f'Retrieving file with run={run}, camcol={camcol}, field={field} and filter {flt}.') im = SDSS.get_images(run=run, camcol=camcol, field=field, band=flt, cache=True)[0] sky_subtracted_original_image_hdu, weight_image_hdu = elaborate_SDSS(im, flt, run, camcol, field) diff --git a/gw/survey_queries/query.py b/gw/survey_queries/query.py index 3bf17227..07e73f57 100644 --- a/gw/survey_queries/query.py +++ b/gw/survey_queries/query.py @@ -3,10 +3,15 @@ """ +from astropy.coordinates import SkyCoord +from astropy import units as u from astropy.time import Time from astropy.wcs import WCS import astropy.io.fits as pf import numpy as np +import logging + +logger = logging.getLogger(__name__) from . 
import PanSTARRS_search, SDSS_search, DECam_search, generate_FOV_grid, LCO_INSTRUMENTS, SURVEYS, OUT_FOLDER
 
 
@@ -21,10 +26,13 @@ class template_query:
 
     '''
 
-    def __init__(self, _obj, _coord, _filters, _inst):
+    def __init__(self, _target_id, _event_id, _obj, _ra, _dec, _filters, _inst):
         '''
         Initializing the class with the name of the object :_obj:,
-        the coordinates :_coord: and the filters :_filters: to search.
+        the coordinates :_ra: and :_dec: and the filters :_filters: to
+        search. The target ID :_target_id: and the GW event ID :_event_id:
+        are also included for completeness, but these are not used by
+        anything.
         The template images will be resampled to match the resolution
         of the instrument (or list of instruments) provided by :_inst:.
         The template FOV will also be as big as to include the whole
@@ -32,12 +40,22 @@ def __init__(self, _obj, _coord, _filters, _inst):
         The empty dictionary :templates_paths: is also created.
 
         *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
+        :_target_id: Target ID
+        :type _target_id: int
+
+        :_event_id: GW event ID
+        :type _event_id: int
+
         :_obj: Name of the object, mainly for file naming purposes
         :type _obj: string
 
-        :_coord: coordinates of the object, corresponding to the center
-                 of the search
-        :type _coord: astropy.coordinates.sky_coordinate.SkyCoord
+        :_ra: RA coordinates of the object in deg, corresponding to
+              the center of the search
+        :type _ra: float
+
+        :_dec: Dec coordinates of the object in deg, corresponding to
+               the center of the search
+        :type _dec: float
 
         :_filters: filters to search. If searching for multiple filters
                    use a single long string, e.g. 'gri'
@@ -49,8 +67,13 @@ def __init__(self, _obj, _coord, _filters, _inst):
         :type _inst: string or list
 
         '''
+
+        self.target_id = _target_id
+        self.event_id = _event_id
         self.obj = _obj
-        self.coord = _coord
+        self.ra = _ra
+        self.dec = _dec
+        self.coord = SkyCoord(_ra, _dec, unit=(u.deg, u.deg))
         self.filters = _filters
 
         # Making the :instruments: list and checking the instruments exist.
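
With the hunk above, the constructor signature grows to carry the SNEx bookkeeping IDs. A hypothetical instantiation under the new signature (all values are made-up examples):

from survey_queries.query import template_query

q = template_query(_target_id=1234, _event_id='S240521a', _obj='NGC4993',
                   _ra=197.44875, _dec=-23.38389, _filters='gri',
                   _inst=['sinistro', 'muscat'])
q.search_for_PS1()  # fills q.templates_paths with one entry per inst/survey/filter
print(q.templates_paths)
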
@@ -95,7 +118,7 @@ def initialize_survey(self):
                 for flt in self.filters:
                     for inst in self.instruments:
                         self.templates_paths[f'{inst}_{_survey}_{flt}'] = '--'
-                print(f'Searching for {_survey} templates.\n')
+                logger.info(f'Searching for {_survey} templates.\n')
                 return function(self, _survey)
             return initialize_survey
         return decorate
@@ -104,7 +127,6 @@ def initialize_survey(self):
     def search_for_PS1(self, survey):
         data, weights = PanSTARRS_search.search_for_PS1(self, survey)
         self.make_templates(_data = data, _weights = weights, _survey = survey)
-        print('This is done')
 
     @set_survey('SDSS')
     def search_for_SDSS(self, survey):
@@ -143,7 +165,7 @@ def make_templates(self, _data, _weights, _survey):
                         _data[flt].pop(skycell_idx)
                         _weights[flt].pop(skycell_idx)
 
-                print(f'Reprojecting and combining skycells of filter {flt} for {inst}.')
+                logger.info(f'Reprojecting and combining skycells of filter {flt} for {inst}.')
 
                 wcs_out, shape_out = find_optimal_celestial_wcs(_data[flt], resolution=LCO_INSTRUMENTS[inst].resolution)
                 array, footprint = reproject_and_coadd(_data[flt], wcs_out, shape_out=shape_out, reproject_function=reproject_interp, input_weights = _weights[flt], combine_function='mean')
@@ -172,9 +194,11 @@ def make_templates(self, _data, _weights, _survey):
                 out_name = f'{OUT_FOLDER}/{_survey}/{inst}_{self.obj}_{flt}'
                 self.templates_paths[f'{inst}_{_survey}_{flt}'] = out_name+'.fits'
                 pf.writeto(out_name+'.fits', array, header, overwrite=True)
+                logger.info(f'{out_name}.fits written out.')
 
                 # Hotpants needs the variance, so we need to write out 1/footprint
                 header['BUNIT'] = ('counts2', 'ADUs^2')
                 pf.writeto(out_name+'.var.fits', 1/footprint, header, overwrite=True)
+                logger.info(f'{out_name}.var.fits written out.')
diff --git a/gw/views.py b/gw/views.py
index 8dc3ef4b..4fb920a7 100644
--- a/gw/views.py
+++ b/gw/views.py
@@ -156,6 +156,9 @@ def submit_galaxy_observations_view(request):
     db_session = _return_session()
     failed_obs = []
     all_pointings = []
+
+    snex1_targets = []
+
     with transaction.atomic():
         for galaxy in galaxies:
             newtarget, created = Target.objects.get_or_create(
@@ -165,6 +168,8 @@ def submit_galaxy_observations_view(request):
                 type='SIDEREAL'
             )
 
+            snex1_targets.append([newtarget.id, galaxy.eventlocalization.nonlocalizedevent.event_id])
+
             if created:
                 gw = Group.objects.get(name='GWO4')
                 assign_perm('tom_targets.view_target', gw, newtarget)
@@ -294,16 +299,17 @@ def submit_galaxy_observations_view(request):
                 record.parameters['name'] = snex_id
                 record.save()
 
-            ### Log the target in SNEx1 and ingest template images
-            run_hook('ingest_gw_galaxy_into_snex1',
-                     newtarget.id,
-                     galaxy.eventlocalization.nonlocalizedevent.event_id,
-                     wrapped_session=db_session)
-
             ### Submit pointing to TreasureMap
             #pointings = build_tm_pointings(newtarget, observing_parameters)
             #all_pointings += pointings
+
+        ### Log the target in SNEx1 and ingest template images
+        run_hook('ingest_gw_galaxy_into_snex1',
+                 snex1_targets,
+                 wrapped_session=db_session)
+
+
     #submitted = submit_tm_pointings(galaxy.eventlocalization.sequences.first(), all_pointings)
 
     #if not submitted:

From d08a6b015440daab40a171e725f99f7a3e15346b Mon Sep 17 00:00:00 2001
From: Giacomo Terreran
Date: Wed, 22 May 2024 15:46:09 -0700
Subject: [PATCH 13/21] Made small changes to make this version runnable
 locally

---
 gw/views.py       | 13 +++++++------
 snex2/settings.py |  2 +-
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/gw/views.py b/gw/views.py
index 4fb920a7..d3f0b840 100644
--- a/gw/views.py
+++ b/gw/views.py
@@ -1,3 +1,4 @@
+from
django.shortcuts import render
 from django.conf import settings
 from django.http import HttpResponse
 from django.db import transaction
@@ -12,16 +13,16 @@
 from astropy.io import fits
 import sep
 from datetime import datetime, timedelta
-from tom_nonlocalizedevents.models import EventSequence
-from models import GWFollowupGalaxy
-from forms import GWGalaxyObservationForm
-from treasure_map_utils import build_tm_pointings, submit_tm_pointings
+from tom_nonlocalizedevents.models import NonLocalizedEvent, EventSequence, EventLocalization
+from gw.models import GWFollowupGalaxy
+from gw.forms import GWGalaxyObservationForm
+from gw.treasure_map_utils import build_tm_pointings, submit_tm_pointings
 from tom_common.hooks import run_hook
 from tom_targets.models import Target, TargetExtra
 from tom_observations.facility import get_service_class
 from tom_observations.models import ObservationRecord, ObservationGroup, DynamicCadence
-from ..custom_code.hooks import _return_session
-from ..custom_code.views import Snex1ConnectionError
+from custom_code.hooks import _return_session
+from custom_code.views import Snex1ConnectionError
 import logging
 
 logger = logging.getLogger(__name__)
diff --git a/snex2/settings.py b/snex2/settings.py
index d88b3845..f76bb6d9 100644
--- a/snex2/settings.py
+++ b/snex2/settings.py
@@ -286,7 +286,7 @@ FACILITIES = {
     'LCO': {
         'portal_url': 'https://observe.lco.global',
-        'api_key': os.environ['LCO_APIKEY'],
+        'api_key': os.getenv('LCO_APIKEY'),
     },
     'GEM': {
         'portal_url': {

From ec31233cf22795ad8739d3be50dcabeecc1a7c3c Mon Sep 17 00:00:00 2001
From: Giacomo Terreran
Date: Tue, 4 Jun 2024 14:45:47 -0700
Subject: [PATCH 14/21] Updated settings.py and requirements.txt to use
 asynchronous calls. The template search will no longer be a hook, but will
 be called directly from views.py. run_template_search.py has also been
 updated to update the database.
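The pattern in miniature: decorating a function with @dramatiq.actor registers it with the configured broker, and calling .send() enqueues the call for a worker process instead of running it inline. A minimal sketch, assuming the Redis broker configured later in this patch; note that .send() serializes its arguments (JSON by default), so only simple values should cross the process boundary. With django_dramatiq, the workers are started separately, e.g. with python manage.py rundramatiq.

    import dramatiq
    from dramatiq.brokers.redis import RedisBroker

    dramatiq.set_broker(RedisBroker(url='redis://127.0.0.1:6379'))

    @dramatiq.actor(max_retries=0)
    def demo_task(target_ids):
        # Executed by a dramatiq worker, not by the web process.
        print('processing', target_ids)

    # In a view this returns immediately; a worker picks up the message.
    demo_task.send([1, 2, 3])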
--- gw/hooks.py | 59 +--------------------------- gw/run_template_search.py | 81 ++++++++++++++++++++++++++++++++------- gw/views.py | 6 ++- requirements.txt | 2 + snex2/settings.py | 15 ++++++++ 5 files changed, 90 insertions(+), 73 deletions(-) diff --git a/gw/hooks.py b/gw/hooks.py index f3bc0c52..3c5e92a1 100644 --- a/gw/hooks.py +++ b/gw/hooks.py @@ -1,14 +1,12 @@ -import os from models import GWFollowupGalaxy from tom_common.hooks import run_hook -from tom_targets.models import Target, TargetExtra +from tom_targets.models import TargetExtra from tom_nonlocalizedevents.models import EventSequence from ..custom_code.views import cancel_observation, Snex1ConnectionError -from ..custom_code.hooks import _return_session, _load_table +from ..custom_code.hooks import _return_session from tom_observations.models import ObservationRecord import logging from django.conf import settings -from run_template_search import run_template_search logger = logging.getLogger(__name__) @@ -71,56 +69,3 @@ def cancel_gw_obs(galaxy_ids=[], sequence_id=None, wrapped_session=None): else: logger.info('Finished canceling GW follow-up observations for sequence {}'.format(sequence_id)) - -def ingest_gw_galaxy_into_snex1(target_list, wrapped_session=None): - - if wrapped_session: - db_session = wrapped_session - - else: - db_session = _return_session(settings.SNEX1_DB_URL) - - o4_galaxies = _load_table('o4_galaxies', db_address=settings.SNEX1_DB_URL) - - templates_to_download = [] - - for target_id, event_id in target_list: - - existing_target = db_session.query(o4_galaxies).filter(o4_galaxies.targetid==target_id) - if existing_target.count() > 0: - if any([t.event_id == event_id for t in existing_target]): - logger.info('Already ingested target {} into o4_galaxies table for event {}'.format(target_id, event_id)) - - else: - logger.info('Found existing target {} in o4_galaxies table, copying it'.format(target_id)) - existing_target_row = existing_target.first() - existing_table = existing_target_row.__table__ #Somehow this is different than the o4_galaxies object - - non_pk_columns = [k for k in existing_table.columns.keys() if k not in existing_table.primary_key.columns.keys()] - data = {c: getattr(existing_target_row, c) for c in non_pk_columns} - data['event_id'] = event_id - - db_session.add(o4_galaxies(**data)) - - else: - t = Target.objects.get(id=target_id) - templates_to_download.append([target_id, event_id, t.objname, t.ra, t.dec]) - - # This call will make everything stop until all templates are downloaded. - templates_paths = run_template_search(templates_to_download, _filters = 'griz', _surveys = ['PS1,SDSS'], _instruments = ['sinistro','muscat']) - - for id in templates_paths.keys(): - db_session.add(o4_galaxies(targetid=templates_paths[id].target_id, event_id=templates_paths[id].event_id, ra0=templates_paths[id].ra, dec0=templates_paths[id].dec, **templates_paths[id].templates_paths)) - - if not wrapped_session: - try: - db_session.commit() - except: - db_session.rollback() - finally: - db_session.close() - - else: - db_session.flush() - - logger.info('Finished ingesting target {} into o4_galaxies'.format(target_id)) diff --git a/gw/run_template_search.py b/gw/run_template_search.py index e5e2ac4a..1c468922 100644 --- a/gw/run_template_search.py +++ b/gw/run_template_search.py @@ -4,29 +4,82 @@ Script to search and (if exist) download template images from available optical surveys. The pixel scale, and the size of the resulting template will match the characteristics of the LCO -specified. 
+instrument specified. The objects will also be inserted in the
+`o4_galaxies` table in the SNEx1 database, as well as the path to the
+downloaded files. Note that the root folder of the download is
+specified in the :__init__.py: of :survey_queries:.
 
-The script is based on the :survey_queries: package. This is just
-a wrapper to basically run the :query: class for a list of objects.
+The :dramatiq: decorator makes it possible to run it asynchronously.
 
 """
 
 from survey_queries.query import template_query
+from ..custom_code.hooks import _return_session, _load_table
+import logging
+from django.conf import settings
+from tom_targets.models import Target
+import dramatiq
 
-def run_template_search(_targets_list, _filters, _surveys, _instruments):
+TABLE_NAME = 'o4_galaxies'
 
+logger = logging.getLogger(__name__)
 
-    out_dict ={}
-    for target_id, event_id, name, ra, dec in _targets_list:
-        s = template_query(target_id, event_id, name, ra, dec, _filters, _instruments)
-        if 'PS1' in _surveys:
-            s.search_for_PS1()
-        if 'SDSS' in _surveys:
-            s.search_for_SDSS()
-
-        out_dict[target_id] = s
+@dramatiq.actor(max_retries=0)
+def search_templates_and_update_snex1(_targets_list, _filters, _surveys, _instruments, _wrapped_session):
+
+    if _wrapped_session:
+        db_session = _wrapped_session
+
+    else:
+        db_session = _return_session(settings.SNEX1_DB_URL)
+
+    gw_table = _load_table(TABLE_NAME, db_address=settings.SNEX1_DB_URL)
+
+    templates_paths ={}
+
+    for target_id, event_id in _targets_list:
+
+        existing_target = db_session.query(gw_table).filter(gw_table.targetid==target_id)
+        if existing_target.count() > 0:
+            if any([t.event_id == event_id for t in existing_target]):
+                logger.info('Already ingested target {} into {} table for event {}'.format(target_id, TABLE_NAME, event_id))
+
+            else:
+                logger.info('Found existing target {} in {} table, copying it'.format(target_id, TABLE_NAME))
+                existing_target_row = existing_target.first()
+                existing_table = existing_target_row.__table__ # Somehow this is different from the o4_galaxies object
+
+                non_pk_columns = [k for k in existing_table.columns.keys() if k not in existing_table.primary_key.columns.keys()]
+                data = {c: getattr(existing_target_row, c) for c in non_pk_columns}
+                data['event_id'] = event_id
+
+                db_session.add(gw_table(**data))
+
+        else:
+            t = Target.objects.get(id=target_id)
+
+            s = template_query(target_id, event_id, t.objname, t.ra, t.dec, _filters, _instruments)
+
+            if 'PS1' in _surveys:
+                s.search_for_PS1()
+            if 'SDSS' in _surveys:
+                s.search_for_SDSS()
+
+            templates_paths[target_id] = s
 
-    return out_dict
+    for id in templates_paths.keys():
+        db_session.add(gw_table(targetid=templates_paths[id].target_id, event_id=templates_paths[id].event_id, ra0=templates_paths[id].ra, dec0=templates_paths[id].dec, **templates_paths[id].templates_paths))
+
+    if not _wrapped_session:
+        try:
+            db_session.commit()
+        except Exception:
+            db_session.rollback()
+        finally:
+            db_session.close()
 
+    else:
+        db_session.flush()
 
+    logger.info('Finished ingesting target {} into {}'.format(target_id, TABLE_NAME))
\ No newline at end of file
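The copy-an-existing-row branch above is a generic SQLAlchemy idiom: collect every non-primary-key column, override the fields that should differ, and insert the copy so the database assigns a fresh primary key. A standalone sketch; the helper name is illustrative, not part of the codebase:

    def clone_row(session, model, row, **overrides):
        # Copy every non-primary-key column of an existing mapped row.
        table = row.__table__
        non_pk = [c for c in table.columns.keys()
                  if c not in table.primary_key.columns.keys()]
        data = {c: getattr(row, c) for c in non_pk}
        data.update(overrides)        # e.g. event_id=new_event_id
        session.add(model(**data))    # the DB assigns the new primary key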
diff --git a/gw/views.py b/gw/views.py
index d3f0b840..11d23653 100644
--- a/gw/views.py
+++ b/gw/views.py
@@ -24,6 +24,7 @@
 from custom_code.hooks import _return_session
 from custom_code.views import Snex1ConnectionError
 import logging
+from run_template_search import search_templates_and_update_snex1
 
 logger = logging.getLogger(__name__)
 
@@ -306,8 +307,9 @@ def submit_galaxy_observations_view(request):
                 #all_pointings += pointings
 
             ### Log the target in SNEx1 and ingest template images
-            run_hook('ingest_gw_galaxy_into_snex1',
-                     snex1_targets,
-                     wrapped_session=db_session)
+            ### and download templates.
+            ### This is run asynchronously
+            search_templates_and_update_snex1.send(snex1_targets,
+                     _wrapped_session=db_session)
 
 
diff --git a/requirements.txt b/requirements.txt
index 3e714e4c..aa9c68c4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,6 +8,8 @@ gunicorn[gevent]
 django==4.2
 django-heroku
 django-storages
+django_dramatiq
+redis
 boto3
 requests_oauthlib
 python-magic
diff --git a/snex2/settings.py b/snex2/settings.py
index f76bb6d9..6b0c1a8c 100644
--- a/snex2/settings.py
+++ b/snex2/settings.py
@@ -44,6 +44,7 @@
     'django.contrib.staticfiles',
     'django.contrib.sites',
     'django_extensions',
+    'django_dramatiq',
     'guardian',
     'tom_common',
     'django_comments',
@@ -549,6 +550,20 @@
     }
 ]
 
+DRAMATIQ_BROKER = {
+    "BROKER": "dramatiq.brokers.redis.RedisBroker",
+    "OPTIONS": {
+        "url": "redis://127.0.0.1:6379"
+    },
+    "MIDDLEWARE": [
+        "dramatiq.middleware.AgeLimit",
+        "dramatiq.middleware.TimeLimit",
+        "dramatiq.middleware.Callbacks",
+        "dramatiq.middleware.Retries",
+        "django_dramatiq.middleware.DbConnectionsMiddleware",
+    ]
+}
+
 if DEBUG:
     INTERNAL_IPS = [
         '127.0.0.1',
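One caveat with the new call path: dramatiq serializes actor arguments into the broker message, so a live SQLAlchemy session such as _wrapped_session cannot actually travel to the worker, and the remaining positional arguments still have to be supplied until defaults are added. A safer shape, sketched under the assumption that the worker always owns its session (default values taken from the removed hook):

    @dramatiq.actor(max_retries=0)
    def search_templates_and_update_snex1(_targets_list, _filters='griz',
                                          _surveys=('PS1', 'SDSS'),
                                          _instruments=('sinistro', 'muscat')):
        # Only JSON-friendly values ([target_id, event_id] pairs and
        # filter/survey strings) travel in the message; the worker opens
        # and closes its own SNEx1 session.
        db_session = _return_session(settings.SNEX1_DB_URL)
        try:
            ...   # query and insert as in the function above
            db_session.commit()
        except Exception:
            db_session.rollback()
        finally:
            db_session.close()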
From 8f52dedf6718d859441800dad3a964bcf1b2cbd4 Mon Sep 17 00:00:00 2001
From: Giacomo Terreran
Date: Tue, 4 Jun 2024 14:54:28 -0700
Subject: [PATCH 15/21] Removed some redundancies

---
 gw/run_template_search.py  |  9 ++-------
 gw/survey_queries/query.py | 15 ++-------------
 2 files changed, 4 insertions(+), 20 deletions(-)

diff --git a/gw/run_template_search.py b/gw/run_template_search.py
index 1c468922..623b64cb 100644
--- a/gw/run_template_search.py
+++ b/gw/run_template_search.py
@@ -13,7 +13,7 @@
 
 """
 
-from survey_queries.query import template_query
+from .survey_queries.query import template_query
 from ..custom_code.hooks import _return_session, _load_table
 import logging
 from django.conf import settings
@@ -36,8 +36,6 @@ def search_templates_and_update_snex1(_targets_list, _filters, _surveys, _instru
 
     gw_table = _load_table(TABLE_NAME, db_address=settings.SNEX1_DB_URL)
 
-    templates_paths ={}
-
     for target_id, event_id in _targets_list:
 
         existing_target = db_session.query(gw_table).filter(gw_table.targetid==target_id)
@@ -66,10 +64,7 @@ def search_templates_and_update_snex1(_targets_list, _filters, _surveys, _instru
         if 'SDSS' in _surveys:
             s.search_for_SDSS()
 
-            templates_paths[target_id] = s
-
-    for id in templates_paths.keys():
-        db_session.add(gw_table(targetid=templates_paths[id].target_id, event_id=templates_paths[id].event_id, ra0=templates_paths[id].ra, dec0=templates_paths[id].dec, **templates_paths[id].templates_paths))
+        db_session.add(gw_table(targetid = target_id, event_id = event_id, ra0=t.ra, dec0=t.dec, **s.templates_paths))
 
     if not _wrapped_session:
         try:
diff --git a/gw/survey_queries/query.py b/gw/survey_queries/query.py
index 07e73f57..e72016ed 100644
--- a/gw/survey_queries/query.py
+++ b/gw/survey_queries/query.py
@@ -26,13 +26,11 @@ class template_query:
 
     '''
 
-    def __init__(self, _target_id, _event_id, _obj, _ra, _dec, _filters, _inst):
+    def __init__(self, _obj, _ra, _dec, _filters, _inst):
         '''
         Initializing the class with the name of the object :_obj:,
         the coordinates :_ra: and :_dec: and the filters :_filters: to
-        search. The target ID :_target_id: and the GW event ID :_event_id:
-        are also included for completeness, but these are not used by
-        anything.
+        search.
         The template images will be resampled to match the resolution
         of the instrument (or list of instruments) provided by :_inst:.
         The template FOV will also be as big as to include the whole
@@ -40,11 +38,6 @@ def __init__(self, _target_id, _event_id, _obj, _ra, _dec, _filters, _inst):
         The empty dictionary :templates_paths: is also created.
         *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
 
-        :_target_id: Target ID
-        :type _target_id: int
-
-        :_event_id: GW event ID
-        :type _event_id: int
 
         :_obj: Name of the object, mainly for file naming purposes
         :type _obj: string
@@ -68,11 +61,7 @@ def __init__(self, _target_id, _event_id, _obj, _ra, _dec, _filters, _inst):
 
         '''
 
-        self.target_id = _target_id
-        self.event_id = _event_id
         self.obj = _obj
-        self.ra = _ra
-        self.dec = _dec
         self.coord = SkyCoord(_ra, _dec, unit=(u.deg, u.deg))
         self.filters = _filters
 

From a39f62190e6baee5e01786b125815f256afeba75 Mon Sep 17 00:00:00 2001
From: Giacomo Terreran
Date: Tue, 4 Jun 2024 15:00:25 -0700
Subject: [PATCH 16/21] Fixed some import definitions

---
 gw/hooks.py |  2 +-
 gw/views.py | 12 ++++++------
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/gw/hooks.py b/gw/hooks.py
index 3c5e92a1..680314ea 100644
--- a/gw/hooks.py
+++ b/gw/hooks.py
@@ -1,4 +1,4 @@
-from models import GWFollowupGalaxy
+from .models import GWFollowupGalaxy
 from tom_common.hooks import run_hook
 from tom_targets.models import TargetExtra
 from tom_nonlocalizedevents.models import EventSequence
diff --git a/gw/views.py b/gw/views.py
index 11d23653..d3317921 100644
--- a/gw/views.py
+++ b/gw/views.py
@@ -14,17 +14,17 @@
 import sep
 from datetime import datetime, timedelta
 from tom_nonlocalizedevents.models import NonLocalizedEvent, EventSequence, EventLocalization
-from gw.models import GWFollowupGalaxy
-from gw.forms import GWGalaxyObservationForm
-from gw.treasure_map_utils import build_tm_pointings, submit_tm_pointings
+from .models import GWFollowupGalaxy
+from .forms import GWGalaxyObservationForm
+from .treasure_map_utils import build_tm_pointings, submit_tm_pointings
 from tom_common.hooks import run_hook
 from tom_targets.models import Target, TargetExtra
 from tom_observations.facility import get_service_class
 from tom_observations.models import ObservationRecord, ObservationGroup, DynamicCadence
-from custom_code.hooks import _return_session
-from custom_code.views import Snex1ConnectionError
+from ..custom_code.hooks import _return_session
+from ..custom_code.views import Snex1ConnectionError
 import logging
-from run_template_search import search_templates_and_update_snex1
+from .run_template_search import search_templates_and_update_snex1
 
 logger = logging.getLogger(__name__)
 

From d50a4c5672688b8fbf62ebc30f52a52d3065de76 Mon Sep 17 00:00:00 2001
From: Giacomo Terreran
Date: Mon, 10 Jun 2024 11:22:37 -0700
Subject: [PATCH 17/21] bug fix

---
 gw/run_template_search.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gw/run_template_search.py b/gw/run_template_search.py
index 623b64cb..c924e026 100644
--- a/gw/run_template_search.py
+++ b/gw/run_template_search.py
@@ -57,7 +57,7 @@ def search_templates_and_update_snex1(_targets_list, _filters, _surveys, _instru
         else:
             t = Target.objects.get(id=target_id)
 
-            s = template_query(target_id, event_id, t.objname, t.ra, t.dec, _filters, _instruments)
+            s = template_query(t.objname, t.ra, t.dec, _filters, _instruments)
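A note on the import churn in patches 15 and 16: when gw is imported as a Django app package, sibling modules need package-relative imports, while the later patches switch custom_code to an absolute import, the usual choice when the apps sit top-level on the Python path. In miniature, using lines that appear in the diffs above:

    # gw/views.py
    from .models import GWFollowupGalaxy          # sibling module inside the gw package
    from .run_template_search import search_templates_and_update_snex1
    from custom_code.hooks import _return_session  # separate app, found on the Python path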
From 71545f25e288881d803ccf1ac1316f2cdb726758 Mon Sep 17 00:00:00 2001
From: Giacomo Terreran
Date: Wed, 12 Jun 2024 10:21:25 -0700
Subject: [PATCH 18/21] Added telescopeid and instrumentid to the SURVEYS
 dictionary. Working on updating photlco table, not tested. Working on
 DECam_search, still far from complete

---
 gw/run_template_search.py         | 75 ++++++++++++++++++++++++++++++++++---
 gw/survey_queries/DECam_search.py |  7 +--
 gw/survey_queries/__init__.py     |  8 ++--
 gw/survey_queries/query.py        |  1 +
 gw/views.py                       |  6 ++-
 5 files changed, 86 insertions(+), 11 deletions(-)

diff --git a/gw/run_template_search.py b/gw/run_template_search.py
index c924e026..f647469c 100644
--- a/gw/run_template_search.py
+++ b/gw/run_template_search.py
@@ -14,12 +14,12 @@
 """
 
 from .survey_queries.query import template_query
-from ..custom_code.hooks import _return_session, _load_table
+from custom_code.hooks import _return_session, _load_table
 import logging
 from django.conf import settings
 from tom_targets.models import Target
 import dramatiq
-
+import datetime
 
 TABLE_NAME = 'o4_galaxies'
@@ -35,6 +35,7 @@ def search_templates_and_update_snex1(_targets_list, _filters, _surveys, _instru
         db_session = _return_session(settings.SNEX1_DB_URL)
 
     gw_table = _load_table(TABLE_NAME, db_address=settings.SNEX1_DB_URL)
+    photlco = _load_table('photlco', db_address=settings.SNEX1_DB_URL)
 
     for target_id, event_id in _targets_list:
 
@@ -66,6 +67,10 @@ def search_templates_and_update_snex1(_targets_list, _filters, _surveys, _instru
 
         db_session.add(gw_table(targetid = target_id, event_id = event_id, ra0=t.ra, dec0=t.dec, **s.templates_paths))
 
+        for template in s.templates_paths:
+            db_session.add(photlco(**photlco_dict(target_id, s.templates_paths[template])))
+
+
     if not _wrapped_session:
         try:
             db_session.commit()
@@ -77,4 +82,68 @@ def search_templates_and_update_snex1(_targets_list, _filters, _surveys, _instru
     else:
         db_session.flush()
 
-    logger.info('Finished ingesting target {} into {}'.format(target_id, TABLE_NAME))
\ No newline at end of file
+    logger.info('Finished ingesting target {} into {}'.format(target_id, TABLE_NAME))
+
+def photlco_dict(_targetid, _template_path):
+
+    from .survey_queries import SURVEYS
+    from astropy.io import fits as pf
+
+    template_header = pf.getheader(_template_path)
+    path_list = _template_path.split('/')
+
+    now = datetime.datetime.now(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S.%f")
+
+    phot_lco_dic = {
+        'targetid': _targetid,
+        'objname': template_header['OBJECT'],
+        'dayobs': template_header['DAY-OBS'],
+        'dateobs': template_header['DATE-OBS'].split('T')[0],
+        'ut': template_header['DATE-OBS'].split('T')[1],
+        'mjd': template_header['MJD-OBS'],
+        'exptime':1,
+        'filter': template_header['FILTER'],
+        'telescopeid': SURVEYS[template_header['SURVEY']].telescopeid,
+        'instrumentid': SURVEYS[template_header['SURVEY']].instrumentid,
+        'telescope': template_header['TELESCOP'],
+        'instrument': template_header['INSTRUME'],
+        'mag': 9999,
+        'dmag': 9999,
+        'airmass': 1,
+        'wcs': 0,
+        'psf': 'X',
+        'apmag': 9999,
+        'psfx': 9999,
+        'psfy': 9999,
+        'psfmag': 9999,
+        'psfdmag': 9999,
+        'z1': 9999,
+        'z2': 9999,
+        'zn': 9999,
+        'c1': 9999,
+        'c2': 9999,
+        'dz1': 9999,
+        'dz2': 9999,
+        'dc1': 9999,
+        'dc2': 9999,
+        'quality': 127,
+        'zcat': 'X',
+        'abscat': 'X',
+        'fwhm': 9999,
+        'magtype': 1,
+        'ra0': template_header['RA'],
+        'dec0': template_header['DEC'],
+        'tracknumber': 0,
+        'filename': path_list[-1],
+        'filetype': 4,
+        'filepath': '/'.join(path_list[:-1])+'/',
+        'groupidcode': 2199023255552,
+        'datecreated': now,
+        'lastmodified': now,
+        'apflux': 9999,
+        'dapflux': 9999,
+        'dapmag': 9999,
+        'lastunpacked': now
+    }
+
+    return phot_lco_dic
diff --git a/gw/survey_queries/DECam_search.py b/gw/survey_queries/DECam_search.py
index 11f4d397..2496c471 100644
--- a/gw/survey_queries/DECam_search.py
+++ 
b/gw/survey_queries/DECam_search.py @@ -15,8 +15,8 @@ # https://datalab.noirlab.edu/docs/manual/UsingAstroDataLab/DataAccessInterfaces/SimpleImageAccessSIA/SimpleImageAccessSIA.html #COLLECTION = 'coadd_all' COLLECTION = 'ls_dr9' -COLLECTION = 'coadd/decaps_dr2' -COLLECTION = 'delve_dr2' +# COLLECTION = 'coadd/decaps_dr2' +# COLLECTION = 'delve_dr2' DECAM_SERVICE = f'https://datalab.noirlab.edu/sia/{COLLECTION}' @@ -57,6 +57,7 @@ def search_for_DECam(self, survey): connect = sia.SIAService(DECAM_SERVICE) table = connect.search(pos = (self.coord.ra.deg, self.coord.dec.deg), size = (LCO_INSTRUMENTS['sinistro'].fov.to(u.deg).value, LCO_INSTRUMENTS['sinistro'].fov.to(u.deg).value), verbosity=2).to_table() + print('something') with open('table.csv','w') as out: for c in table.columns: out.write(c+',') @@ -65,7 +66,7 @@ def search_for_DECam(self, survey): for c in table.columns: out.write(str(table[c][i])+',') out.write('\n') - + exit() diff --git a/gw/survey_queries/__init__.py b/gw/survey_queries/__init__.py index 55d44c3c..f0935f62 100644 --- a/gw/survey_queries/__init__.py +++ b/gw/survey_queries/__init__.py @@ -15,9 +15,11 @@ def __init__(self, _fov, _resolution): self.resolution = _resolution class Survey: - def __init__(self, _telescope, _instrument, _skycell_size, _track_skycells_in_header): + def __init__(self, _telescope, _telescopeid, _instrument, _instrumentid, _skycell_size, _track_skycells_in_header): self.telescope = _telescope + self.telescopeid = _telescopeid self.instrument = _instrument + self.instrumentid = _instrumentid self.skycell_size = _skycell_size self.track_skycells_in_header = _track_skycells_in_header @@ -33,8 +35,8 @@ def __init__(self, _telescope, _instrument, _skycell_size, _track_skycells_in_he } SURVEYS ={ - 'PS1': Survey(_telescope = 'PS1', _instrument = 'GPC1', _skycell_size = 0.4*u.deg, _track_skycells_in_header = PanSTARRS_search.track_skycells_in_header), - 'SDSS': Survey(_telescope = '2.5m', _instrument = 'SDSS', _skycell_size = 10*u.arcmin, _track_skycells_in_header = SDSS_search.track_skycells_in_header) + 'PS1': Survey(_telescope = 'PS1', _telescopeid = 41, _instrument = 'GPC1', _instrumentid = 191, _skycell_size = 0.4*u.deg, _track_skycells_in_header = PanSTARRS_search.track_skycells_in_header), + 'SDSS': Survey(_telescope = 'APO 2.5m', _telescopeid = 139, _instrument = 'SDSS', _instrumentid = 42, _skycell_size = 10*u.arcmin, _track_skycells_in_header = SDSS_search.track_skycells_in_header) } def generate_FOV_grid(_center, _fov, _step, circle=True, wcs = None): diff --git a/gw/survey_queries/query.py b/gw/survey_queries/query.py index e72016ed..eb3fa81d 100644 --- a/gw/survey_queries/query.py +++ b/gw/survey_queries/query.py @@ -172,6 +172,7 @@ def make_templates(self, _data, _weights, _survey): header['DAY-OBS'] = header['DATE-OBS'].split('T')[0].replace('-', '') header['TELESCOP'] = SURVEYS[_survey].telescope header['INSTRUME'] = SURVEYS[_survey].instrument + header['SURVEY'] = _survey header['OBJECT'] = self.obj header['FILTER'] = flt+'p' # NOTE: this works only with griz header['AIRMASS'] = 1 diff --git a/gw/views.py b/gw/views.py index d3317921..3a65e0a4 100644 --- a/gw/views.py +++ b/gw/views.py @@ -21,8 +21,8 @@ from tom_targets.models import Target, TargetExtra from tom_observations.facility import get_service_class from tom_observations.models import ObservationRecord, ObservationGroup, DynamicCadence -from ..custom_code.hooks import _return_session -from ..custom_code.views import Snex1ConnectionError +from custom_code.hooks import 
_return_session +from custom_code.views import Snex1ConnectionError import logging from .run_template_search import search_templates_and_update_snex1 @@ -154,6 +154,8 @@ def submit_galaxy_observations_view(request): galaxy_ids = json.loads(request.GET['galaxy_ids'])['galaxy_ids'] galaxies = GWFollowupGalaxy.objects.filter(id__in=galaxy_ids) + db_session = _return_session() + try: db_session = _return_session() failed_obs = [] From 7c222b1542284c80c543bb0ca4248de7a079e6e2 Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Wed, 12 Jun 2024 14:58:14 -0700 Subject: [PATCH 19/21] tmp --- gw/survey_queries/DECam_search.py | 34 +++++++++++++++++++++++++++++++ gw/views.py | 2 -- 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/gw/survey_queries/DECam_search.py b/gw/survey_queries/DECam_search.py index 2496c471..11d46182 100644 --- a/gw/survey_queries/DECam_search.py +++ b/gw/survey_queries/DECam_search.py @@ -17,6 +17,8 @@ COLLECTION = 'ls_dr9' # COLLECTION = 'coadd/decaps_dr2' # COLLECTION = 'delve_dr2' +DECAM_SERVICE = f'https://datalab.noirlab.edu/tap/{COLLECTION}' + DECAM_SERVICE = f'https://datalab.noirlab.edu/sia/{COLLECTION}' @@ -54,6 +56,38 @@ def search_for_DECam(self, survey): from . import generate_FOV_grid, LCO_INSTRUMENTS, SURVEYS + # Survey: + # + # + + + + 'https://datalab.noirlab.edu/query/query?sql=" + survey_temp + "+q3c_radial_query(s_ra,+s_dec,+" + items[_i_ra] + ",+" + items[_i_dec] + ",+2)+)+SELECT+access_url,instrument_name,obs_bandpass,exptime,prodtype,proctype,date_obs+FROM+region+WHERE+(abs(spat_hilimit1+-+spat_lolimit1)+<+90.0+AND+("+items[_i_ra]+"+BETWEEN+spat_lolimit1+AND+spat_hilimit1)+AND+("+items[_i_dec]+"+BETWEEN+spat_lolimit2+AND+spat_hilimit2))+OR+(abs(spat_hilimit1+-+spat_lolimit1)+>+90.0+AND+(+("+items[_i_ra]+"+BETWEEN+0.0+AND+spat_lolimit1)+OR+("+items[_i_ra]+"+BETWEEN+spat_hilimit1+AND+360.0))+AND+("+items[_i_dec]+"+BETWEEN+spat_lolimit2+AND+spat_hilimit2))+ORDER+BY+date_obs+DESC&ofmt=csv&out=none&async=false";' + + connect = sia.SIAService(DECAM_SERVICE) table = connect.search(pos = (self.coord.ra.deg, self.coord.dec.deg), size = (LCO_INSTRUMENTS['sinistro'].fov.to(u.deg).value, LCO_INSTRUMENTS['sinistro'].fov.to(u.deg).value), verbosity=2).to_table() diff --git a/gw/views.py b/gw/views.py index 3a65e0a4..ae52a04a 100644 --- a/gw/views.py +++ b/gw/views.py @@ -154,8 +154,6 @@ def submit_galaxy_observations_view(request): galaxy_ids = json.loads(request.GET['galaxy_ids'])['galaxy_ids'] galaxies = GWFollowupGalaxy.objects.filter(id__in=galaxy_ids) - db_session = _return_session() - try: db_session = _return_session() failed_obs = [] From aa4afc935e40d552c075135a000825481395afb9 Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Thu, 13 Jun 2024 15:58:24 -0700 Subject: [PATCH 20/21] tmp, not working --- gw/run_template_search.py | 7 +++++-- snex2/settings.py | 1 + 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/gw/run_template_search.py b/gw/run_template_search.py index f647469c..d91ffed4 100644 --- a/gw/run_template_search.py +++ b/gw/run_template_search.py @@ -22,11 +22,14 @@ import datetime TABLE_NAME = 'o4_galaxies' +DEFAULT_FILTERS = 'gri' +DEFAULT_SURVEYS = ['PS1','SDSS'] +DEFAULT_INSTRUMENTS = 'all' logger = logging.getLogger(__name__) @dramatiq.actor(max_retries=0) -def search_templates_and_update_snex1(_targets_list, _filters, _surveys, _instruments, _wrapped_session): +def search_templates_and_update_snex1(_targets_list, _wrapped_session, _filters=DEFAULT_FILTERS, _surveys=DEFAULT_SURVEYS, 
_instruments=DEFAULT_INSTRUMENTS): if _wrapped_session: db_session = _wrapped_session @@ -58,7 +61,7 @@ def search_templates_and_update_snex1(_targets_list, _filters, _surveys, _instru else: t = Target.objects.get(id=target_id) - s = template_query(t.objname, t.ra, t.dec, _filters, _instruments) + s = template_query(t.name, t.ra, t.dec, _filters, _instruments) if 'PS1' in _surveys: s.search_for_PS1() diff --git a/snex2/settings.py b/snex2/settings.py index 6b0c1a8c..cf7e8678 100644 --- a/snex2/settings.py +++ b/snex2/settings.py @@ -563,6 +563,7 @@ "django_dramatiq.middleware.DbConnectionsMiddleware", ] } +DRAMATIQ_AUTODISCOVER_MODULES = ["run_template_search"] if DEBUG: INTERNAL_IPS = [ From aa5920cfac2c17735334ae8f1c336dece4592665 Mon Sep 17 00:00:00 2001 From: Giacomo Terreran Date: Wed, 24 Jul 2024 16:24:54 -0700 Subject: [PATCH 21/21] added triplets functionality for GW observations with performed subtraction --- gw/templatetags/gw_tags.py | 13 +++++--- gw/views.py | 67 ++++++++++++++++++++++++-------------- 2 files changed, 50 insertions(+), 30 deletions(-) diff --git a/gw/templatetags/gw_tags.py b/gw/templatetags/gw_tags.py index 22e6dc16..5cf5115f 100644 --- a/gw/templatetags/gw_tags.py +++ b/gw/templatetags/gw_tags.py @@ -80,8 +80,11 @@ def galaxy_distribution(context, galaxies): return {'figure': figure} -@register.inclusion_tag('gw/plot_triplets.html', takes_context=True) -def plot_triplets(context, triplet, galaxy, display_type): +@register.inclusion_tag('gw/plot_triplets.html')#, takes_context=True) +def plot_triplets(triplet, galaxy, display_type): + + #This can be galaxy sizes times some factor. + SIZE = 0.9/60 #deg plot_context = {} @@ -95,10 +98,10 @@ def plot_triplets(context, triplet, galaxy, display_type): if display_type == 'list': ###TODO: Change this: - galaxy_coord = SkyCoord(228.691875, 31.223633, unit='deg')#galaxy.ra, galaxy.dec, unit='deg') + #galaxy_coord = SkyCoord(228.691875, 31.223633, unit='deg')#galaxy.ra, galaxy.dec, unit='deg') #galaxy_pix_ra, galaxy_pix_dec = skycoord_to_pixel(galaxy_coord, wcs) - img_coord_lower = SkyCoord(228.691875-0.9/60, 31.223633-0.9/60, unit='deg') - img_coord_upper = SkyCoord(228.691875+0.9/60, 31.223633+0.9/60, unit='deg') + img_coord_lower = SkyCoord(galaxy.ra-SIZE, galaxy.dec-SIZE, unit='deg') + img_coord_upper = SkyCoord(galaxy.ra+SIZE, galaxy.dec+SIZE, unit='deg') img_pixel_upper_ra, img_pixel_lower_dec = skycoord_to_pixel(img_coord_lower, wcs) img_pixel_lower_ra, img_pixel_upper_dec = skycoord_to_pixel(img_coord_upper, wcs) diff --git a/gw/views.py b/gw/views.py index ae52a04a..d7e6d276 100644 --- a/gw/views.py +++ b/gw/views.py @@ -21,7 +21,7 @@ from tom_targets.models import Target, TargetExtra from tom_observations.facility import get_service_class from tom_observations.models import ObservationRecord, ObservationGroup, DynamicCadence -from custom_code.hooks import _return_session +from custom_code.hooks import _return_session, _load_table from custom_code.views import Snex1ConnectionError import logging from .run_template_search import search_templates_and_update_snex1 @@ -62,6 +62,13 @@ class EventSequenceGalaxiesTripletView(TemplateView, LoginRequiredMixin): template_name = 'gw/galaxy_observations.html' def get_context_data(self, **kwargs): + + db_session = _return_session(settings.SNEX1_DB_URL) + + o4_galaxies = _load_table('o4_galaxies', db_address = settings.SNEX1_DB_URL) + photlco = _load_table('photlco', db_address = settings.SNEX1_DB_URL) + + context = super().get_context_data(**kwargs) sequence = 
EventSequence.objects.get(id=self.kwargs['id']) @@ -70,42 +77,52 @@ def get_context_data(self, **kwargs): galaxies = GWFollowupGalaxy.objects.filter(eventlocalization=loc) galaxies = galaxies.annotate(name=F("id")) context['galaxy_count'] = len(galaxies) - #TODO: Filter galaxies by observations, but for now we'll just take a subset and fake it context['superevent_id'] = sequence.nonlocalizedevent.event_id context['superevent_index'] = sequence.nonlocalizedevent.id + existing_observations = db_session.query(photlco).filter(photlco.targetid==o4_galaxies.targetid).filter(o4_galaxies.event_id == sequence.nonlocalizedevent.event_id) + + triplets=[] + + for t in existing_observations: + if t.filetype==3: + + diff_file = os.path.join(t.filepath, t.filename) + + temp_filename = fits.getheader(diff_file)['TEMPLATE'] + temp_filepath = [el.filepath for el in existing_observations if el.filename==temp_filename][0] + + triplet={ + #'galaxy': galaxy, + 'obsdate': t.dateobs, + 'filter': t.filter, + 'exposure_time': t.exptime, + 'original': {'filename': '.'.join(diff_file.split('.')[:-3])+'.fits'}, + 'template': {'filename': os.path.join(temp_filepath, temp_filename)}, + 'diff': {'filename': diff_file} + } + + triplets.append(triplet) + rows = [] - #TODO: Populate this dynamically - for galaxy in galaxies[:1]: - - row = {'galaxy': galaxy, - 'triplets': [{ - 'obsdate': '2023-04-19', - 'filter': 'g', - 'exposure_time': 200, - 'original': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/obs.fits')}, - 'template': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/ref.fits')}, - 'diff': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/sub.fits')} - }, - #{ - # 'obsdate': '2023-04-19', - # 'filter': 'g', - # 'exposure_time': 200, - # 'original': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/obs.fits')}, - # 'template': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/ref.fits')}, - # 'diff': {'filename': os.path.join(BASE_DIR, 'data/fits/gw/sub.fits')} - #} - ] - } + for galaxy in galaxies: + if len(triplets)!=0: + row = { + 'galaxy': galaxy, + 'triplets':triplets + } + + + rows.append(row) context['rows'] = rows return context - +#this is not yet implemented class GWFollowupGalaxyTripletView(TemplateView, LoginRequiredMixin): template_name = 'gw/galaxy_observations_individual.html'
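A closing note on the triplet query in EventSequenceGalaxiesTripletView: the two chained filters join photlco to o4_galaxies implicitly. An explicit join is equivalent and reads more clearly; a sketch, assuming the automapped classes behave like ordinary mapped tables:

    existing_observations = (
        db_session.query(photlco)
        .join(o4_galaxies, photlco.targetid == o4_galaxies.targetid)
        .filter(o4_galaxies.event_id == sequence.nonlocalizedevent.event_id)
    )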