9 changes: 9 additions & 0 deletions examples/README.txt
@@ -3,3 +3,12 @@ Example Gallery
***************

The gallery contains examples of how to use XRTpy.
========================
XRT Stuff
========================

- **Removing Light Leak**

========================
XRT FOV GUI
========================

- **FOV tool test**
82 changes: 82 additions & 0 deletions examples/preview_observation.py
@@ -0,0 +1,82 @@
"""
=============================
FOV tool test
=============================

We are just testing stuff here
"""

import sys
import time as timer

import matplotlib.dates as mdates
import matplotlib.pyplot as plt
import numpy as np

import astropy.time
import astropy.units as u
from astropy.io import fits

import sunpy
from sunpy.net import Fido
from sunpy.net import attrs as a

import ipywidgets as widgets
from ipywidgets import (
    FloatRangeSlider,
    FloatSlider,
    IntProgress,
    IntSlider,
    Layout,
    RadioButtons,
    interact,
)

sys.path.append('/Users/ntrueba/SOLAR/code/GIT/xrtpy/xrtpy/visualization/fov/')
import metadata_manager as ObsMeta

##############################################################################
# Fido search
# The tool is meant to preview metadata within the Fido ecosystem, which means
# the Fido search itself is the slowest part of this script unless you are
# looking at a very small time range.

# Define the time range of interest for solar observations
time_range_1 = a.Time("2011-06-07 06:00:00", "2011-06-07 06:45:54")
time_range_2 = a.Time("2007-12-17 10:40:00", "2007-12-17 13:00:54")

# Specify the instrument as 'xrt' to search for Hinode X-Ray Telescope data
instrument = a.Instrument("xrt")


# This will return a catalog of available XRT data during the specified period
xrt_downloaded_files_1 = Fido.search(time_range_1, instrument)
xrt_downloaded_files_2 = Fido.search(time_range_2, instrument)



##############################################################################
# Metadata extraction and the XRT_meta structure
# The first function accepts the output of the Fido search as input and
# retrieves the corresponding metadata without having to download the data.
# By default it retrieves the level 0 metadata (same as SSWIDL), which is very
# quick when dealing with many files. If you want the level 1 metadata, pass
# ``fast_bool=False``; this is only recommended for short observations (< 1 hr),
# as it retrieves metadata at a rate of roughly 5 observations per second.
# The second function creates a handy metadata object for all the observations.
# It contains a list of headers (``xmeta.head_lis``) and a dictionary
# (``xmeta.metadata``) of important filter-separated quantities.
# This can be very handy, since you get much more information than Fido alone
# provides, so you can download observations that meet very specific conditions.
# Let us know if you would like something specific included here.



xrt_dset1 = ObsMeta.DatasetMetaManager(xrt_downloaded_files_1)
xrt_dset2 = ObsMeta.DatasetMetaManager(xrt_downloaded_files_2)
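
##############################################################################
# A minimal sketch of how the metadata object described above might be
# inspected. The ``head_lis`` and ``metadata`` attribute names come from the
# notes above, and the ``fast_bool`` keyword is assumed to be forwarded by
# ``DatasetMetaManager``; adjust to the actual signature in
# ``metadata_manager`` if it differs.
#
# xrt_dset1_l1 = ObsMeta.DatasetMetaManager(xrt_downloaded_files_1, fast_bool=False)  # level 1 metadata (slow)
# print(len(xrt_dset1.head_lis))      # one header per observation
# print(xrt_dset1.metadata.keys())    # filter-separated quantities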

##############################################################################
# Plotting backend
# ``%matplotlib inline`` works best in notebooks to avoid flickering.
# ipywidgets is not available in the rendered HTML page, but you can uncomment
# this line when you download the notebook.
# %matplotlib inline

ani = xrt_dset2.plot_preview(ani_bool=False, d_mode=False)
plt.show()

##############################################################################
# We can also use night mode, this time with a vertical layout

ani = xrt_dset1.plot_preview(ani_bool=False, d_mode=True, vertical_plot=True)
plt.show()

##############################################################################
# We can also do a horizontal version

ani = xrt_dset2.plot_preview(ani_bool=False, d_mode=True, vertical_plot=False)
plt.show()
1 change: 1 addition & 0 deletions pyproject.toml
@@ -45,6 +45,7 @@ dependencies = [
"scikit-image>=0.21",
"scipy>=1.11.1",
"sunpy[map]>=5.1",
"ipywidgets>=8.0.0",
]

optional-dependencies.dev = [
174 changes: 174 additions & 0 deletions visualization/fov/metadata_downloader.py
@@ -0,0 +1,174 @@
import urllib.request

import numpy as np
import scipy.io as sio
from scipy.io import readsav

import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.io import fits
from astropy.time import Time

from sunpy.coordinates import frames


def download_metadata(xrt_downloaded_files, filen, overwrite=False):
    """Read the header of each search result remotely (level 1, slow path) without downloading the full data."""
    url_lis = xrt_downloaded_files[0][:]['fileid']
    url_str_lis = []
    for i in range(len(url_lis)):
        url_str_lis.append(url_lis[i])
    primary_hdu = fits.PrimaryHDU(data=np.ones((3, 3)))
    c1 = fits.Column(name='URL', array=url_str_lis, format='100A')
    c2 = fits.Column(name='header_int', array=np.asarray(range(len(url_str_lis))) + 2, format='J')
    table_hdu = fits.BinTableHDU.from_columns([c1, c2])

    hdul2 = fits.HDUList([primary_hdu, table_hdu])

    for i in range(len(url_str_lis)):
        # Print a rough progress indicator (percent complete) every 10 files
        if i % 10 == 0:
            print(int(1000.0 * i / len(url_str_lis)) / 10.0, '%')
        fsspec_kwargs = {"block_size": 100_000, "cache_type": "bytes"}
        with fits.open(url_lis[i], use_fsspec=True, fsspec_kwargs=fsspec_kwargs) as hdul:
            # Download a single header and store it as an ImageHDU with dummy data
            t_header = hdul[0].header
            image_hdu = fits.ImageHDU(data=np.ones((100, 100)), header=t_header, name="header" + str(i))
            hdul2.append(image_hdu)
    return hdul2

    # hdul2.writeto(filen, overwrite=overwrite)

def date_to_meta(xrt_download_list):
Contributor review comment (suggested change):
-    def date_to_meta(xrt_download_list):
+    def _date_to_meta(xrt_download_list):
If there's a function that is not intended for end users, then we can start the name with an underscore to indicate that it is private.

    time_lis = xrt_download_list[0]['Start Time']
    year_lis = xrt_download_list[0]['Start Time'].ymdhms.year
    month_lis = xrt_download_list[0]['Start Time'].ymdhms.month
    day_lis = xrt_download_list[0]['Start Time'].ymdhms.day
    new_date = []
    file_lis = []
    for i in range(len(time_lis)):
        # Build a YYYYMMDD string for each observation and record which
        # unique date it belongs to
        year_str = str(year_lis[i])
        month_str = str(month_lis[i]).zfill(2)
        day_str = str(day_lis[i]).zfill(2)
        ndatei = year_str + month_str + day_str
        if ndatei in file_lis:
            new_date.append(file_lis.index(ndatei))
        else:
            file_lis.append(ndatei)
            new_date.append(file_lis.index(ndatei))
    return file_lis, new_date

def get_urls(file_n_lis, ggg):
    """Find the genxcat (.geny) catalog file name for each observation date in the HTML listing."""
    nfile = len(file_n_lis)
    geny_lis = []
    for i in range(nfile):
        find_url = 'xrt' + file_n_lis[i]
        findex = ggg.find(find_url)
        # Grab a window around the match and trim it at the 'geny' extension
        gen_fn = ggg[findex:findex + 35]
        findex2 = gen_fn.find('geny')
        gen_fn = gen_fn[:findex2 + 4]
        geny_lis.append(gen_fn)
    return geny_lis

def get_metafile(geny_lis):
    """Download each geny catalog file and read it with ``readsav``."""
    url_start = 'https://sot.lmsal.com/data/sot/metadata/sswdb/hinode/xrt/xrt_genxcat/'
    ngeny = len(geny_lis)
    meta_lis = []
    for i in range(ngeny):
        # Simple progress indicator
        print(i)
        gen_fn = geny_lis[i]
        f, h = urllib.request.urlretrieve(url_start + gen_fn)
        data2 = readsav(f)["p0"]
        data_dict2 = {k: data2[k] for k in data2.dtype.names}
        meta_lis.append(data_dict2)
    return meta_lis

#def mk_meta_header(meta_lis):
#print(data_dict2['DATE_OBS'])

def meta_to_dict(data_dict, di):
    """Convert the catalog entry at index ``di`` into a plain dictionary, decoding byte strings."""
    dkeys = data_dict.keys()
    hdict = {}
    for dki in dkeys:
        try:
            hdict[dki] = data_dict[dki][di].decode('ascii')
        except AttributeError:
            # Not a byte string; keep the value as-is
            hdict[dki] = data_dict[dki][di]
    return hdict

def match_vso_to_cat(data_dict_lis, cat_fi, xrt_download):
    """Match each Fido/VSO search result to the closest-in-time catalog entry and return its header dictionary."""
    n_dict = len(data_dict_lis)
    cat_time_lis = []
    for i in range(n_dict):
        data_dict = data_dict_lis[i]
        date_obs_cat = data_dict['DATE_OBS']
        cat_str = []
        for cat_bin in date_obs_cat:
            cat_str.append(cat_bin.decode('ascii'))
        cat_time = Time(np.asarray(cat_str), format='isot', scale='utc')
        cat_time_lis.append(cat_time)
    min_ti_lis = []
    delt_lis = []
    delt_lisp = []
    delt_lism = []
    header_lis = []
    for i in range(len(xrt_download[0]['Start Time'])):
        cat_time = cat_time_lis[cat_fi[i]]
        stime = xrt_download[0]['Start Time'][i]
        # Time difference between every catalog entry and this observation, in seconds
        delt = cat_time - stime
        delt = delt.value * 24.0 * 3600.0
        min_ti = np.argmin(np.abs(delt))
        min_ti_lis.append(min_ti)
        delt_lis.append(delt[min_ti])
        try:
            delt_lisp.append(delt[min_ti + 1])
            delt_lism.append(delt[min_ti - 1])
        except IndexError:
            # The closest entry is at the edge of the catalog; skip the neighbours
            pass
        header_lis.append(meta_to_dict(data_dict_lis[cat_fi[i]], min_ti))
    return header_lis

def get_html_lis():
    """Return the HTML listing of the XRT genxcat directory as a string."""
    url_start = 'https://sot.lmsal.com/data/sot/metadata/sswdb/hinode/xrt/xrt_genxcat/'
    with urllib.request.urlopen(url_start) as response:
        html = response.read()
    ggg = html.decode('utf-8')
    return ggg

def download_metadata_fast(xrt_downloaded_files, ggg=None):
    """Retrieve level 0 metadata from the XRT genxcat catalogs without downloading individual files."""
    if ggg is None:
        ggg = get_html_lis()
    file_lis, new_date = date_to_meta(xrt_downloaded_files)
    genyl = get_urls(file_lis, ggg)
    print('downloading')
    tmeta_lis = get_metafile(genyl)
    print('done')
    hlis3 = match_vso_to_cat(tmeta_lis, new_date, xrt_downloaded_files)
    return hlis3

def fetch_metadata(xrt_downloaded_files, fast_bool=True):
    """Return a list of headers for a Fido search result.

    With ``fast_bool=True`` the level 0 catalog metadata is used (fast);
    with ``fast_bool=False`` the level 1 headers are read remotely (slow).
    """
    if fast_bool:
        print('Fast Metadata (Level 0)')
        return download_metadata_fast(xrt_downloaded_files, ggg=None)
    else:
        print('Slow Metadata (Level 1)')
        hdul = download_metadata(xrt_downloaded_files, '')
        hlis = []
        for i in range(len(xrt_downloaded_files[0])):
            hlis.append(hdul[i + 2].header)
        return hlis

