Commit

alternate backends with l3_frac and dataset-scaled fix (2.3.15 release) (#402)

* fix bug in applying l3_frac within dataset scaling (pblum_mode='dataset-scaled') when using alternate backends.
kecnry authored Jan 24, 2021
1 parent 331bc9b commit f28cb04
Showing 5 changed files with 25 additions and 27 deletions.
4 changes: 4 additions & 0 deletions README.md
@@ -84,6 +84,10 @@ To understand how to use PHOEBE, please consult the [tutorials, scripts and manu
CHANGELOG
----------

### 2.3.15 - alternate backends with l3_frac and dataset-scaled fix

* fix bug in applying l3_frac within dataset scaling (pblum_mode='dataset-scaled') when using alternate backends.
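
For context, a minimal sketch of a configuration that exercises this fix, assuming the optional ellc backend is installed; the dataset/compute labels and numeric values below are illustrative, not taken from the commit:

```python
import numpy as np
import phoebe

b = phoebe.default_binary()

# hypothetical observations to scale the model against
times = np.linspace(0, 1, 101)
b.add_dataset('lc', times=times, fluxes=np.ones(101), sigmas=0.01*np.ones(101),
              dataset='lc01')

# scale model fluxes to the dataset and include fractional third light
b.set_value('pblum_mode', dataset='lc01', value='dataset-scaled')
b.set_value('l3_mode', dataset='lc01', value='fraction')
b.set_value('l3_frac', dataset='lc01', value=0.2)

# alternate backend (requires the ellc package); this is the code path the fix touches
b.add_compute('ellc', compute='fastcompute')
b.run_compute(compute='fastcompute')
```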

### 2.3.14 - import_solution with uniqueid mismatch fix

* fix bug where falling back on twigs when importing a solution on a different bundle failed. It is still suggested to save the bundle and import solutions on the bundle used when calling export_solver.
2 changes: 1 addition & 1 deletion phoebe/__init__.py
@@ -16,7 +16,7 @@
"""

__version__ = '2.3.14'
__version__ = '2.3.15'

import os as _os
import sys as _sys
6 changes: 3 additions & 3 deletions phoebe/backend/backends.py
@@ -2396,12 +2396,12 @@ def _worker_setup(self, b, compute, infolist, **kwargs):

# NOTE: domdt is listed in ellc as deg/anomalistic period, but as deg/sidereal period in the fortran source (which agrees with comparisons)
# NOTE: this does NOT need to be iterative, because the original dperdt is in deg/d and independent of period
logger.debug("dperdt (rad/d): ", domdt_rad)
logger.debug("dperdt (rad/d): {}".format(domdt_rad))
period_sid = comp_ps.get_value(qualifier='period', component=orbitref, unit=u.d, **_skip_filter_checks)
# NOTE: period_sidereal does not need to be corrected from t0@system -> t0_supconj because ellc does not support dpdt
logger.debug("period_sidereal(t0@system,t0_ref,dpdt=0): ", period_sid)
logger.debug("period_sidereal(t0@system,t0_ref,dpdt=0): {}".format(period_sid))
domdt = comp_ps.get_value(qualifier='dperdt', component=orbitref, unit=u.deg/u.d, **_skip_filter_checks) * period_sid
logger.debug("dperdt (deg/d * period_sidereal): ", domdt)
logger.debug("dperdt (deg/d * period_sidereal): {}".format(domdt))

f_c = np.sqrt(ecc) * np.cos(w)
f_s = np.sqrt(ecc) * np.sin(w)
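
Two things are going on in this hunk: the debug calls now embed the value via `str.format` (with the old form, `logging`'s lazy %-style formatting never renders the trailing positional argument into the message), and `dperdt` is converted from deg/d to deg per sidereal period before being handed to ellc. A standalone sketch of that conversion with made-up numbers:

```python
dperdt_deg_per_day = 5e-4      # hypothetical apsidal-motion rate (deg/d), as stored by PHOEBE
period_sidereal = 2.345678     # hypothetical sidereal period (d)

# ellc expects domdt in deg per sidereal period, so multiply the rate by the period
domdt = dperdt_deg_per_day * period_sidereal
print("dperdt (deg/d * period_sidereal): {}".format(domdt))
```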
34 changes: 14 additions & 20 deletions phoebe/frontend/bundle.py
@@ -10218,10 +10218,21 @@ def restore_conf():
ml_params.set_value(qualifier='times', dataset=ds, value=times_ds, ignore_readonly=True, **_skip_filter_checks)
ml_params.set_value(qualifier='fluxes', dataset=ds, value=fluxes, ignore_readonly=True, **_skip_filter_checks)

# handle scaling to absolute fluxes as necessary for alternate backends
# NOTE: this must happen BEFORE dataset-scaling as that scaling assumes absolute fluxes
for flux_param in ml_params.filter(qualifier='fluxes', kind='lc', **_skip_filter_checks).to_list():
fluxes = flux_param.get_value(unit=u.W/u.m**2)
if computeparams.kind not in ['phoebe', 'legacy']:
# then we need to scale the "normalized" fluxes to pbflux first
fluxes *= pbfluxes.get(flux_param.dataset)
# otherwise fluxes are already correctly scaled by passing
# relative pblums or pblums_scale to the respective backend

flux_param.set_value(fluxes, ignore_readonly=True)
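
A hedged, self-contained sketch of what this newly added block ensures (function and variable names below are illustrative, not the bundle's internals): fluxes from alternate backends are normalized and must be brought to absolute passband flux before the dataset-scaling step that follows:

```python
import numpy as np

def to_absolute_fluxes(fluxes, backend_kind, pbflux):
    """Scale normalized fluxes from an alternate backend to absolute passband flux."""
    if backend_kind not in ('phoebe', 'legacy'):
        # alternate backends (e.g. ellc, jktebop) return normalized fluxes
        return fluxes * pbflux
    # phoebe/legacy already received relative pblums or pblum_scale,
    # so their fluxes are correctly scaled as-is
    return fluxes

# illustrative usage with made-up numbers
fluxes_abs = to_absolute_fluxes(np.array([0.98, 1.00, 0.85]), 'ellc', pbflux=2.5e-9)
```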

# handle flux scaling for any pblum_mode == 'dataset-scaled'
# or for any dataset in which pblum_mode == 'dataset-coupled' and pblum_dataset points to a 'dataset-scaled' dataset
datasets_dsscaled = []

coupled_datasets = self.filter(qualifier='pblum_mode', dataset=ml_params.datasets, value='dataset-coupled', **_skip_filter_checks).datasets
for pblum_mode_param in self.filter(qualifier='pblum_mode', dataset=ml_params.datasets, value='dataset-scaled', **_skip_filter_checks).to_list():
this_dsscale_datasets = [pblum_mode_param.dataset] + self.filter(qualifier='pblum_dataset', dataset=coupled_datasets, value=pblum_mode_param.dataset, **_skip_filter_checks).datasets
@@ -10276,7 +10287,6 @@ def _scale_fluxes_cfit(fluxes, scale_factor):
# use values in this namespace rather than passing directly
return _scale_fluxes(fluxes, scale_factor, l3_fracs, l3_pblum_abs_sums, l3_fluxes)

# TODO: can we skip this if sigmas don't exist?
logger.debug("calling curve_fit with estimated scale_factor={}".format(scale_factor_approx))
popt, pcov = cfit(_scale_fluxes_cfit, model_fluxess_interp, ds_fluxess, p0=(scale_factor_approx), sigma=ds_sigmass)
scale_factor = popt[0]
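
For readers unfamiliar with the dataset-scaling step, a simplified, self-contained illustration of the `curve_fit` call above, fitting a single multiplicative scale factor between model and observed fluxes weighted by the observational sigmas (the real `_scale_fluxes_cfit` also folds in the l3 terms; all numbers here are made up):

```python
import numpy as np
from scipy.optimize import curve_fit

model_fluxes = np.array([0.98, 1.00, 0.85, 1.00])   # model, interpolated at the observed times
obs_fluxes = np.array([1.95, 2.01, 1.71, 1.99])     # observed fluxes
obs_sigmas = np.full(4, 0.02)                       # observational uncertainties

def _scale(fluxes, scale_factor):
    # simplified: only the multiplicative scale, no l3 handling
    return fluxes * scale_factor

scale_factor_approx = obs_fluxes.mean() / model_fluxes.mean()
popt, pcov = curve_fit(_scale, model_fluxes, obs_fluxes,
                       p0=(scale_factor_approx,), sigma=obs_sigmas)
scale_factor = popt[0]   # ~2.0 for these made-up numbers
```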
@@ -10301,25 +10311,15 @@ def _scale_fluxes_cfit(fluxes, scale_factor):

flux_param.set_value(qualifier='fluxes', value=syn_fluxes, ignore_readonly=True)

# scale_factor is currently the factor between the native backend fluxes
# and those scaled to the dataset. For backends that natively give absolute
# fluxes, this can then be applied directly to luminosities. But for those
# that do not give absolute fluxes, we also need to estimate the scaling
# factor between absolute fluxes and the backend's fluxes.
if computeparams.kind in ['ellc', 'jktebop']:
logger.info("estimating absolute flux for compute='{}', dataset='{}' to apply to flux_scale".format(computeparams.compute, flux_param.dataset))
system, pblums_abs, pblums_scale, pblums_rel, pbfluxes = self.compute_pblums(compute=computeparams.compute, dataset=flux_param.dataset, pblum_abs=True, ret_structured_dicts=True, skip_checks=True, **kwargs)
pbflux_abs_est = np.sum(np.asarray(list(pblums_abs.get(flux_param.dataset).values()))/(4*np.pi))
scale_factor /= pbflux_abs_est

ml_addl_params += [FloatParameter(qualifier='flux_scale', dataset=dataset, value=scale_factor, readonly=True, default_unit=u.dimensionless_unscaled, description='scaling applied to fluxes (intensities/luminosities) due to dataset-scaling')]
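
The estimate in the ellc/jktebop branch above amounts to summing each component's absolute passband luminosity over 4π to approximate the absolute passband flux; a standalone sketch with made-up values:

```python
import numpy as np

# hypothetical absolute passband luminosities per component
pblums_abs = {'primary': 12.566, 'secondary': 6.283}

# estimated absolute passband flux: sum of L / (4*pi)
pbflux_abs_est = np.sum(np.asarray(list(pblums_abs.values())) / (4*np.pi))

scale_factor = 2.0              # factor fit between the backend fluxes and the dataset
scale_factor /= pbflux_abs_est  # now expressed relative to absolute fluxes
```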

for mesh_param in ml_params.filter(kind='mesh', **_skip_filter_checks).to_list():
if mesh_param.qualifier in ['intensities', 'abs_intensities', 'normal_intensities', 'abs_normal_intensities', 'pblum_ext']:
logger.debug("applying scale_factor={} to {} parameter in mesh".format(scale_factor, mesh_param.qualifier))
mesh_param.set_value(mesh_param.get_value()*scale_factor, ignore_readonly=True)

# handle flux scaling based on pbflux, distance, l3
# handle flux scaling based on distance and l3
# NOTE: this must happen AFTER dataset scaling
distance = self.get_value(qualifier='distance', context='system', unit=u.m, **_skip_filter_checks)
for flux_param in ml_params.filter(qualifier='fluxes', kind='lc', **_skip_filter_checks).to_list():
dataset = flux_param.dataset
@@ -10329,12 +10329,6 @@ def _scale_fluxes_cfit(fluxes, scale_factor):
continue

fluxes = flux_param.get_value(unit=u.W/u.m**2)
if computeparams.kind not in ['phoebe', 'legacy']:
# then we need to scale the "normalized" fluxes to pbflux first
fluxes *= pbfluxes.get(dataset)
# otherwise fluxes are already correctly scaled by passing
# relative pblums or pblums_scale to the respective backend

fluxes = fluxes/distance**2 + l3s.get(dataset)

flux_param.set_value(fluxes, ignore_readonly=True)
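
Finally, a standalone sketch (made-up values) of the distance and third-light step above; datasets that were already dataset-scaled are skipped via the `continue` earlier in the loop, since their scaling fit already accounts for l3:

```python
import numpy as np

fluxes = np.array([2.0e26, 1.9e26])   # hypothetical fluxes at unit distance
distance = 3.086e17                   # 10 pc expressed in metres
l3 = 1.0e-9                           # hypothetical additive third-light flux (W/m^2)

fluxes = fluxes / distance**2 + l3    # dilute by distance, then add third light
```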
6 changes: 3 additions & 3 deletions setup.py
@@ -346,8 +346,8 @@ def _env_variable_bool(key, default):
long_description = "\n".join(long_description_s[long_description_s.index("INTRODUCTION"):])

setup (name = 'phoebe',
version = '2.3.14',
description = 'PHOEBE 2.3.14',
version = '2.3.15',
description = 'PHOEBE 2.3.15',
long_description=long_description,
author = 'PHOEBE development team',
author_email = '[email protected]',
@@ -367,7 +367,7 @@ def _env_variable_bool(key, default):
'Programming Language :: Python :: 3 :: Only',
],
python_requires='>=3.6, <4',
download_url = 'https://github.com/phoebe-project/phoebe2/tarball/2.3.14',
download_url = 'https://github.com/phoebe-project/phoebe2/tarball/2.3.15',
packages = ['phoebe', 'phoebe.parameters', 'phoebe.parameters.solver', 'phoebe.parameters.figure', 'phoebe.frontend', 'phoebe.constraints', 'phoebe.dynamics', 'phoebe.distortions', 'phoebe.algorithms', 'phoebe.atmospheres', 'phoebe.backend', 'phoebe.solverbackends', 'phoebe.solverbackends.ebai', 'phoebe.utils', 'phoebe.helpers', 'phoebe.pool', 'phoebe.dependencies', 'phoebe.dependencies.autofig', 'phoebe.dependencies.nparray', 'phoebe.dependencies.distl', 'phoebe.dependencies.unitsiau2015'],
install_requires=['numpy>=1.12','scipy>=1.2','astropy>=1.0', 'corner', 'pytest', 'requests', 'python-socketio[client]']+['flask', 'flask-cors', 'flask-socketio', 'gevent-websocket'],
package_data={'phoebe.atmospheres':['tables/wd/*', 'tables/passbands/*'],
