Skip to content

Commit

Permalink
Orsay feedback - second round (#557)
Browse files Browse the repository at this point in the history
* Make color plot zero line more apparent

* Fix the object name in the help popup so it is actually visible in ZTF data

* CSV download options for main object and SSO tabs

* Make suggestion window below search bar clickable (submits the input)

* Use `radius` as an alias for `r`, and use it in the quick fields

* Show a badge for cdsxmatch in search results, when it is different from the main classification

* Remove cutout fields from API output when they are not requested

* Make TNS reverse search less greedy by using prefix search instead of substring

* Use slightly different colors for negative detections in light curves

* Make upper limit points semi-transparent to improve overall readability of the plots

* Partially fix the datesearch test

* Fix ssoft test too

* Add GCVS and VSX badges, if corresponding fields are set

* Reset active page number to 1 on every new search

* Enable substring matching for autocomplete options

* Add placeholders for some layout elements to reduce visual layout shifts

* Simplify the call graphs by removing unnecessary calls

* Fix cropping of table fields dropdown on small table heights

* Add VSX/GCVS classes to Neighbourhood card

* Change the page title based on its path

* Disable 'Updating...' in the page title

* Add positions of closest reference objects to Aladin map. Allows quickly seeing when there are several different ones being used over the lightcurve.

* Minor optimization for decoding HBase output TreeMap

* Prevent more unneeded callback calls

* Use dcc.Store instead of hidden Divs to store object data

* Optimization of hbase output conversion routines

* Vectorize date conversions to speed them up

* Add external link to DataCentral aggregator

* Minor fixes
  • Loading branch information
karpov-sv authored Jan 17, 2024
1 parent 5cecefb commit eb75e6b
Show file tree
Hide file tree
Showing 16 changed files with 406 additions and 338 deletions.
4 changes: 2 additions & 2 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,10 +61,10 @@
}],
long_callback_manager=long_callback_manager,
background_callback_manager=background_callback_manager,
update_title=None,
title='Fink Science Portal'
)


app.title = 'Fink Science Portal'
nlimit = 10000

app.server.config['MAX_CONTENT_LENGTH'] = 100 * 1024 * 1024
Expand Down
30 changes: 16 additions & 14 deletions apps/api/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
from apps.utils import extract_cutouts
from apps.utils import hbase_type_converter
from apps.utils import isoify_time
from apps.utils import hbase_to_dict

from apps.euclid.utils import load_euclid_header
from apps.euclid.utils import add_columns
Expand Down Expand Up @@ -140,8 +141,8 @@ def return_object_pdf(payload: dict) -> pd.DataFrame:
)
resultsUP.update(resultUP)

pdfU = pd.DataFrame.from_dict(resultsU, orient='index')
pdfUP = pd.DataFrame.from_dict(resultsUP, orient='index')
pdfU = pd.DataFrame.from_dict(hbase_to_dict(resultsU), orient='index')
pdfUP = pd.DataFrame.from_dict(hbase_to_dict(resultsUP), orient='index')

pdf['d:tag'] = 'valid'
pdfU['d:tag'] = 'upperlim'
Expand Down Expand Up @@ -607,7 +608,7 @@ def return_ssocand_pdf(payload: dict) -> pd.DataFrame:
return pd.DataFrame({})

# Construct the dataframe
pdf = pd.DataFrame.from_dict(results, orient='index')
pdf = pd.DataFrame.from_dict(hbase_to_dict(results), orient='index')

if 'key:time' in pdf.columns:
pdf = pdf.drop(columns=['key:time'])
Expand Down Expand Up @@ -721,7 +722,7 @@ def format_and_send_cutout(payload: dict) -> pd.DataFrame:
results = client.scan(
"",
"key:key:{}".format(payload['objectId']),
"b:cutout{}_stampData,i:jd,i:candid".format(payload['kind']),
"b:cutout{}_stampData,i:objectId,i:jd,i:candid".format(payload['kind']),
0, True, True
)

Expand Down Expand Up @@ -1080,7 +1081,7 @@ def return_statistics_pdf(payload: dict) -> pd.DataFrame:
cols,
0, True, True
)
pdf = pd.DataFrame.from_dict(results, orient='index')
pdf = pd.DataFrame.from_dict(hbase_to_dict(results), orient='index')

client.close()

Expand Down Expand Up @@ -1389,7 +1390,8 @@ def return_resolver_pdf(payload: dict) -> pd.DataFrame:
else:
# TNS poll -- take the first nmax occurrences
if reverse:
to_evaluate = "d:internalname:{}".format(name)
# Prefix search on second part of the key which is `fullname_internalname`
to_evaluate = "key:key:_{}:substring".format(name)
results = client.scan(
"",
to_evaluate,
Expand All @@ -1409,7 +1411,7 @@ def return_resolver_pdf(payload: dict) -> pd.DataFrame:
# Restore default limits
client.close()

pdfs = pd.DataFrame.from_dict(results, orient='index')
pdfs = pd.DataFrame.from_dict(hbase_to_dict(results), orient='index')
elif resolver == 'simbad':
client = connect_to_hbase_table('ztf')
if reverse:
Expand All @@ -1422,7 +1424,7 @@ def return_resolver_pdf(payload: dict) -> pd.DataFrame:
0, False, False
)
client.close()
pdfs = pd.DataFrame.from_dict(results, orient='index')
pdfs = pd.DataFrame.from_dict(hbase_to_dict(results), orient='index')
else:
r = requests.get(
'http://cds.unistra.fr/cgi-bin/nph-sesame/-oxp/~S?{}'.format(name)
Expand All @@ -1446,7 +1448,7 @@ def return_resolver_pdf(payload: dict) -> pd.DataFrame:
0, False, False
)
client.close()
pdfs = pd.DataFrame.from_dict(results, orient='index')
pdfs = pd.DataFrame.from_dict(hbase_to_dict(results), orient='index')

# ssnamenr -> MPC name & number
if not pdfs.empty:
Expand All @@ -1462,7 +1464,7 @@ def return_resolver_pdf(payload: dict) -> pd.DataFrame:
)
results.update(result)
client.close()
pdfs = pd.DataFrame.from_dict(results, orient='index')
pdfs = pd.DataFrame.from_dict(hbase_to_dict(results), orient='index')
else:
# MPC -> ssnamenr
# keys follow the pattern <name>-<deduplication>
Expand All @@ -1483,7 +1485,7 @@ def return_resolver_pdf(payload: dict) -> pd.DataFrame:
0, False, False
)
client.close()
pdfs = pd.DataFrame.from_dict(results, orient='index')
pdfs = pd.DataFrame.from_dict(hbase_to_dict(results), orient='index')

return pdfs

Expand Down Expand Up @@ -1604,7 +1606,7 @@ def download_euclid_data(payload: dict) -> pd.DataFrame:
0, False, False
)

pdf = pd.DataFrame.from_dict(results, orient='index')
pdf = pd.DataFrame.from_dict(hbase_to_dict(results), orient='index')

# Remove hbase specific fields
if 'key:key' in pdf.columns:
Expand Down Expand Up @@ -1656,7 +1658,7 @@ def retrieve_metadata(objectId: str) -> pd.DataFrame:
"*",
0, False, False
)
pdf = pd.DataFrame.from_dict(results, orient='index')
pdf = pd.DataFrame.from_dict(hbase_to_dict(results), orient='index')
client.close()
return pdf

Expand All @@ -1671,7 +1673,7 @@ def retrieve_oid(metaname: str, field: str) -> pd.DataFrame:
"*",
0, True, True
)
pdf = pd.DataFrame.from_dict(results, orient='index')
pdf = pd.DataFrame.from_dict(hbase_to_dict(results), orient='index')
client.close()

return pdf
90 changes: 83 additions & 7 deletions apps/cards.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ def card_lightcurve_summary():
Returns
----------
card: dbc.Card
Card with the cutouts drawn inside
Card with the lightcurve drawn inside
"""
card = dmc.Paper(
[
Expand Down Expand Up @@ -228,6 +228,18 @@ def create_external_links(ra0, dec0):
href='https://ztf.snad.space/search/{} {}/{}'.format(ra0, dec0, 5)
)
),
dbc.Col(
dbc.Button(
className='btn btn-default btn-circle btn-lg zoom btn-image',
style={'background-image': 'url(/assets/buttons/dclogo_small.png)'},
color='dark',
outline=True,
id='DataCentral',
title='DataCentral Data Aggregation Service',
target="_blank",
href='https://das.datacentral.org.au/open?RA={}&DEC={}&FOV={}&ERR={}'.format(ra0, dec0, 0.5, 2.0)
)
),
], justify='around'
),
dbc.Row(
Expand Down Expand Up @@ -341,21 +353,24 @@ def card_neighbourhood(pdf):
else:
gaianame = None
cdsxmatch = pdf['d:cdsxmatch'].values[0]
vsx = pdf['d:vsx'].values[0]
gcvs = pdf['d:gcvs'].values[0]

card = dmc.Paper(
[
dcc.Markdown(
"""
Constellation: `{}`
Class (SIMBAD): `{}`
Class (VSX/GCVS): `{}` / `{}`
Name (MPC): `{}`
Name (Gaia): `{}`
Distance (Gaia): `{:.2f}` arcsec
Distance (PS1): `{:.2f}` arcsec
Distance (ZTF): `{:.2f}` arcsec
""".format(
constellation,
cdsxmatch, ssnamenr, gaianame,
cdsxmatch, vsx, gcvs, ssnamenr, gaianame,
float(neargaia), float(distpsnr1), float(distnr)
),
className="markdown markdown-pre ps-2 pe-2"
Expand Down Expand Up @@ -553,7 +568,10 @@ def card_id(pdf):
loading(
dmc.Paper(
[
dbc.Row(id='stamps', justify='around', className="g-0"),
dbc.Row(
dmc.Skeleton(style={'width': '100%', 'aspect-ratio': '3/1'}),
id='stamps', justify='around', className="g-0"
),
],
radius='sm', p='xs', shadow='sm', withBorder=True, style={'padding':'5px'}
)
Expand Down Expand Up @@ -879,10 +897,12 @@ def generate_metadata_name(oid):
@app.callback(
Output('card_id_left', 'children'),
[
Input('object-data', 'children'),
Input('object-uppervalid', 'children'),
Input('object-upper', 'children')
])
Input('object-data', 'data'),
Input('object-uppervalid', 'data'),
Input('object-upper', 'data')
],
prevent_initial_call=True
)
def card_id1(object_data, object_uppervalid, object_upper):
""" Add a card containing basic alert data
"""
Expand Down Expand Up @@ -949,6 +969,28 @@ def card_id1(object_data, object_uppervalid, object_upper):
)
)

gcvs = pdf['d:gcvs'].values[0]
if gcvs and gcvs != 'Unknown':
badges.append(
dmc.Badge(
"GCVS: {}".format(gcvs),
variant='outline',
color=class_colors['Simbad'],
size='md'
)
)

vsx = pdf['d:vsx'].values[0]
if vsx and vsx != 'Unknown':
badges.append(
dmc.Badge(
"VSX: {}".format(vsx),
variant='outline',
color=class_colors['Simbad'],
size='md'
)
)

distnr = pdf['i:distnr'].values[0]
if distnr:
if is_source_behind(distnr):
Expand Down Expand Up @@ -1122,6 +1164,28 @@ def card_search_result(row, i):
)
)

gcvs = row.get('d:gcvs')
if gcvs and gcvs != 'Unknown':
badges.append(
dmc.Badge(
"GCVS: {}".format(gcvs),
variant='outline',
color=class_colors['Simbad'],
size='md'
)
)

vsx = row.get('d:vsx')
if vsx and vsx != 'Unknown':
badges.append(
dmc.Badge(
"VSX: {}".format(vsx),
variant='outline',
color=class_colors['Simbad'],
size='md'
)
)

# Nearby objects
distnr = row.get('i:distnr')
if distnr:
Expand Down Expand Up @@ -1226,6 +1290,12 @@ def card_search_result(row, i):
dbc.Row(
[
dbc.Col(
dmc.Skeleton(
style={
'width': '12pc',
'height': '12pc',
},
),
id={'type': 'search_results_cutouts', 'objectId': name, 'index': i},
width='auto'
),
Expand All @@ -1237,6 +1307,12 @@ def card_search_result(row, i):
width='auto',
),
dbc.Col(
dmc.Skeleton(
style={
'width': '100%',
'height': '15pc'
},
),
id={'type': 'search_results_lightcurve', 'objectId': name, 'index': i},
xs=12, md=True,
),
Expand Down
2 changes: 1 addition & 1 deletion apps/gw.py
Original file line number Diff line number Diff line change
Expand Up @@ -324,7 +324,7 @@ def display_skymap_gw(nclick, gw_data, credible_level, superevent_name, searchur

pdf = pd.read_json(gw_data)
if len(pdf) > 0:
pdf['v:lastdate'] = pdf['i:jd'].apply(convert_jd)
pdf['v:lastdate'] = convert_jd(pdf['i:jd'])
pdf['i:objectId'] = pdf['i:objectId'].apply(markdownify_objectid)
# Coordinate of the first alert
ra0 = pdf['i:ra'].values[0]
Expand Down
5 changes: 3 additions & 2 deletions apps/mulens/cards.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,9 @@
@app.callback(
Output("card_mulens", "children"),
[
Input('object-data', 'children'),
]
Input('object-data', 'data'),
],
prevent_initial_call=True
)
def card_mulens(object_data):
""" Add a card containing button to fit for microlensing events
Expand Down
Loading

0 comments on commit eb75e6b

Please sign in to comment.