Commit
Merge pull request #229 from fermiPy/dmcat-fermipy
Dmcat fermipy
Di Mauro Mattia authored May 23, 2018
2 parents 0cfd4e9 + 831998a commit b4d7e3c
Showing 34 changed files with 1,858 additions and 800 deletions.
29 changes: 26 additions & 3 deletions fermipy/castro.py
@@ -26,7 +26,7 @@
from fermipy.spectrum import SpectralFunction, SEDFunctor
from fermipy.utils import onesided_cl_to_dlnl
from fermipy.utils import twosided_cl_to_dlnl

from fermipy.utils import load_yaml

PAR_NAMES = {
"PowerLaw": ["Prefactor", "Index"],
@@ -600,6 +600,9 @@ def __init__(self, norm_vals, nll_vals, norm_type):
norm_vals : `~numpy.ndarray`
    The normalization values in an N X M array, where N is the
    number of bins and M the number of sampled values for each bin.
    Note that these should be the true values, with the
    reference spectrum included, and _NOT_ the values w.r.t. the
    reference spectrum.
nll_vals : `~numpy.ndarray`
The _negative_ log-likelihood values in an N X M array,
@@ -960,7 +963,7 @@ def TS_spectrum(self, spec_vals):
return 2. * (self._nll_null - self.__call__(spec_vals))

def build_scandata_table(self):
"""
"""Build an `astropy.table.Table` object from these data.
"""
shape = self._norm_vals.shape
col_norm = Column(name="norm", dtype=float)
@@ -1069,6 +1072,26 @@ def refSpec(self):
""" Return a `~fermipy.castro.ReferenceSpec` with the spectral data """
return self._refSpec

@classmethod
def create_from_yamlfile(cls, yamlfile):
"""Create a Castro data object from a yaml file contains
the likelihood data."""
data = load_yaml(yamlfile)
nebins = len(data)
emin = np.array([data[i]['emin'] for i in range(nebins)])
emax = np.array([data[i]['emax'] for i in range(nebins)])
ref_flux = np.array([data[i]['flux'][1] for i in range(nebins)])
ref_eflux = np.array([data[i]['eflux'][1] for i in range(nebins)])
conv = np.array([data[i]['eflux2npred'] for i in range(nebins)])
ref_npred = conv*ref_eflux
ones = np.ones(ref_flux.shape)
ref_spec = ReferenceSpec(emin, emax, ones, ref_flux, ref_eflux, ref_npred)
norm_data = np.array([data[i]['eflux'] for i in range(nebins)])
ll_data = np.array([data[i]['logLike'] for i in range(nebins)])
max_ll = ll_data.max(1)
nll_data = (max_ll - ll_data.T).T
return cls(norm_data, nll_data, ref_spec, 'eflux')

@classmethod
def create_from_flux_points(cls, txtfile):
"""Create a Castro data object from a text file containing a
Expand Down Expand Up @@ -1240,7 +1263,7 @@ def create_from_sedfile(cls, fitsfile, norm_type='eflux'):
elif norm_type == "norm":
norm_vals = np.array(tab_s['norm_scan'])
else:
raise Exception('Unrecognized normalization type: %s' % norm_type)
raise ValueError('Unrecognized normalization type: %s' % norm_type)

nll_vals = -np.array(tab_s['dloglike_scan'])
ref_spec = ReferenceSpec.create_from_table(tab_s)
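The new `create_from_yamlfile` method reads one entry per energy bin, each carrying the bin edges, the flux and energy-flux scans (with the reference value at index 1), an eflux-to-npred conversion factor, and the log-likelihood scan. A minimal sketch of that layout, with hypothetical two-bin values, written through PyYAML:

import yaml

# Hypothetical input for CastroData.create_from_yamlfile(); the key
# names match the fields read by the method above, the values are
# made up for illustration.
bins = [
    dict(emin=1000.0, emax=3162.0,       # bin edges (MeV assumed)
         flux=[1e-10, 2e-10, 4e-10],     # flux scan; flux[1] is the reference
         eflux=[2e-7, 4e-7, 8e-7],       # energy-flux scan; eflux[1] is the reference
         eflux2npred=5e6,                # converts eflux to predicted counts
         logLike=[-1.2, 0.0, -3.4]),     # log-likelihood at each scan point
    dict(emin=3162.0, emax=10000.0,
         flux=[5e-11, 1e-10, 2e-10],
         eflux=[1e-7, 2e-7, 4e-7],
         eflux2npred=4e6,
         logLike=[-0.8, 0.0, -2.1]),
]

with open('castro.yaml', 'w') as fh:
    yaml.dump(bins, fh)

# castro = CastroData.create_from_yamlfile('castro.yaml')

Note how `(max_ll - ll_data.T).T` in the method broadcasts each bin's maximum log-likelihood across that bin's scan, yielding negative log-likelihoods whose minimum is zero in every row.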
2 changes: 1 addition & 1 deletion fermipy/diffuse/defaults.py
@@ -16,7 +16,7 @@
'hpx_order_ccube': (9, 'Maximum HEALPIX order for binning counts data.', int),
'hpx_order_expcube': (6, 'Maximum HEALPIX order for exposure cubes.', int),
'hpx_order_fitting': (7, 'Maximum HEALPIX order for model fitting.', int),
'mktimefilter': ('nosm', 'Key for gtmktime selection', str),
'mktimefilter': (None, 'Key for gtmktime selection', str),
'do_ltsum': (False, 'Run gtltsum on inputs', bool),
'make_xml': (True, 'Make XML files.', bool),
'dry_run': (False, 'Print commands but do not run them', bool),
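These option entries are `(default, help string, type)` triples, and the change above makes the `gtmktime` filter opt-in: with a `None` default, no selection key is assumed unless the user supplies one. A sketch, not fermipy's actual implementation, of how such triples typically map onto `argparse` arguments:

import argparse

# Sketch only: fermipy's job framework does this internally; the
# option names below are copied from the dict above.
options = {
    'hpx_order_fitting': (7, 'Maximum HEALPIX order for model fitting.', int),
    'mktimefilter': (None, 'Key for gtmktime selection', str),
    'dry_run': (False, 'Print commands but do not run them', bool),
}

parser = argparse.ArgumentParser()
for name, (default, helpstr, otype) in options.items():
    if otype is bool:
        # a False default becomes a flag that switches the behavior on
        parser.add_argument('--%s' % name, action='store_true', help=helpstr)
    else:
        parser.add_argument('--%s' % name, type=otype, default=default, help=helpstr)

args = parser.parse_args(['--hpx_order_fitting', '9'])
print(args.hpx_order_fitting, args.mktimefilter, args.dry_run)  # 9 None False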
196 changes: 118 additions & 78 deletions fermipy/diffuse/diffuse_analysis.py
@@ -5,6 +5,7 @@
from __future__ import absolute_import, division, print_function

from fermipy.utils import load_yaml
from fermipy.jobs.link import Link
from fermipy.jobs.chain import Chain

from fermipy.diffuse import defaults as diffuse_defaults
@@ -21,7 +22,19 @@
NAME_FACTORY = NameFactory()

class DiffuseCompChain(Chain):
"""Small class to build srcmaps for diffuse components
"""Chain to build srcmaps for diffuse components
This chain consists of:
sum-rings : SumRings_SG
Merge GALProp gas maps by type and ring
srcmaps-diffuse : SrcmapsDiffuse_SG
Compute diffuse component source maps in parallel
vstack-diffuse : Vstack_SG
Combine diffuse component source maps
"""
appname = 'fermipy-diffuse-comp-chain'
linkname_default = 'diffuse-comp'
@@ -35,39 +48,53 @@ class DiffuseCompChain(Chain):
outdir=(None, 'Output directory', str),
dry_run=diffuse_defaults.diffuse['dry_run'])

__doc__ += Link.construct_docstring(default_options)

def __init__(self, **kwargs):
"""C'tor
"""
super(DiffuseCompChain, self).__init__(**kwargs)
self.comp_dict = None

def _map_arguments(self, input_dict):
def _map_arguments(self, args):
"""Map from the top-level arguments to the arguments provided to
the individual links """
data = input_dict.get('data')
comp = input_dict.get('comp')
library = input_dict.get('library')
dry_run = input_dict.get('dry_run', False)
data = args.get('data')
comp = args.get('comp')
library = args.get('library')
dry_run = args.get('dry_run', False)

self._load_link_args('sum-rings', SumRings_SG,
library=library,
outdir=input_dict['outdir'],
dry_run=dry_run)
self._set_link('sum-rings', SumRings_SG,
library=library,
outdir=args['outdir'],
dry_run=dry_run)

self._load_link_args('srcmaps-diffuse', SrcmapsDiffuse_SG,
comp=comp, data=data,
library=library,
make_xml=input_dict['make_xml'],
dry_run=dry_run)
self._set_link('srcmaps-diffuse', SrcmapsDiffuse_SG,
comp=comp, data=data,
library=library,
make_xml=args['make_xml'],
dry_run=dry_run)

self._load_link_args('vstack-diffuse', Vstack_SG,
comp=comp, data=data,
library=library,
dry_run=dry_run)
self._set_link('vstack-diffuse', Vstack_SG,
comp=comp, data=data,
library=library,
dry_run=dry_run)


class CatalogCompChain(Chain):
"""Small class to build srcmaps for diffuse components
"""Small class to build srcmaps for catalog components
This chain consists of:
srcmaps-catalog : SrcmapsCatalog_SG
Build source maps for all catalog sources in parallel
gather-srcmaps : GatherSrcmaps_SG
Gather source maps into single files
merge-srcmaps : MergeSrcmaps_SG
Compute source maps for merged sources
"""
appname = 'fermipy-catalog-comp-chain'
linkname_default = 'catalog-comp'
@@ -81,44 +108,58 @@ class CatalogCompChain(Chain):
make_xml=(False, "Make XML files for catalog components", bool),
dry_run=diffuse_defaults.diffuse['dry_run'])

__doc__ += Link.construct_docstring(default_options)

def __init__(self, **kwargs):
"""C'tor
"""
super(CatalogCompChain, self).__init__(**kwargs)
self.comp_dict = None

def _register_link_classes(self):
GatherSrcmaps_SG.register_class()
MergeSrcmaps_SG.register_class()
SrcmapsCatalog_SG.register_class()

def _map_arguments(self, input_dict):
def _map_arguments(self, args):
"""Map from the top-level arguments to the arguments provided to
the individual links """
data = input_dict.get('data')
comp = input_dict.get('comp')
library = input_dict.get('library')
dry_run = input_dict.get('dry_run', False)
data = args.get('data')
comp = args.get('comp')
library = args.get('library')
dry_run = args.get('dry_run', False)

        self._load_link_args('srcmaps-catalog', SrcmapsCatalog_SG,
                             comp=comp, data=data,
                             library=library,
                             nsrc=input_dict.get('nsrc', 500),
                             dry_run=dry_run)
        self._set_link('srcmaps-catalog', SrcmapsCatalog_SG,
                       comp=comp, data=data,
                       library=library,
                       nsrc=args.get('nsrc', 500),
                       dry_run=dry_run)

        self._load_link_args('gather-srcmaps', GatherSrcmaps_SG,
                             comp=comp, data=data,
                             library=library,
                             dry_run=dry_run)
        self._set_link('gather-srcmaps', GatherSrcmaps_SG,
                       comp=comp, data=data,
                       library=library,
                       dry_run=dry_run)

        self._load_link_args('merge-srcmaps', MergeSrcmaps_SG,
                             comp=comp, data=data,
                             library=library,
                             dry_run=dry_run)
        self._set_link('merge-srcmaps', MergeSrcmaps_SG,
                       comp=comp, data=data,
                       library=library,
                       dry_run=dry_run)


class DiffuseAnalysisChain(Chain):
    """Small class to define diffuse analysis chain"""
    """Chain to define diffuse all-sky analysis
    This chain consists of:
    prepare : `SplitAndBinChain`
        Bin the data and make the exposure maps
    diffuse-comp : `DiffuseCompChain`
        Make source maps for diffuse components
    catalog-comp : `CatalogCompChain`
        Make source maps for catalog components
    assemble-model : `AssembleModelChain`
        Assemble the models for fitting
    """
appname = 'fermipy-diffuse-analysis'
linkname_default = 'diffuse'
usage = '%s [options]' % (appname)
@@ -127,51 +168,50 @@ class DiffuseAnalysisChain(Chain):
default_options = dict(config=diffuse_defaults.diffuse['config'],
dry_run=diffuse_defaults.diffuse['dry_run'])

def _map_arguments(self, input_dict):
__doc__ += Link.construct_docstring(default_options)

def _map_arguments(self, args):
"""Map from the top-level arguments to the arguments provided to
the individual links """
config_yaml = input_dict['config']
config_yaml = args['config']
config_dict = load_yaml(config_yaml)

dry_run = input_dict.get('dry_run', False)
dry_run = args.get('dry_run', False)

data = config_dict.get('data')
comp = config_dict.get('comp')
library = config_dict.get('library')
models = config_dict.get('models')
scratch = config_dict.get('scratch')

self._load_link_args('prepare', SplitAndBinChain,
comp=comp, data=data,
ft1file=config_dict.get('ft1file'),
hpx_order_ccube=config_dict.get('hpx_order_ccube'),
hpx_order_expcube=config_dict.get('hpx_order_expcube'),
scratch=scratch,
dry_run=dry_run)

self._load_link_args('diffuse-comp', DiffuseCompChain,
comp=comp, data=data,
library=library,
make_xml=config_dict.get('make_diffuse_comp_xml', False),
outdir=config_dict.get('merged_gasmap_dir', 'merged_gasmap'),
dry_run=dry_run)

self._load_link_args('catalog-comp', CatalogCompChain,
comp=comp, data=data,
library=library,
make_xml=config_dict.get('make_catalog_comp_xml', False),
nsrc=config_dict.get('catalog_nsrc', 500),
dry_run=dry_run)

self._load_link_args('assemble-model', AssembleModelChain,
comp=comp, data=data,
library=library,
models=models,
hpx_order=config_dict.get('hpx_order_fitting'),
dry_run=dry_run)



self._set_link('prepare', SplitAndBinChain,
comp=comp, data=data,
ft1file=config_dict.get('ft1file'),
hpx_order_ccube=config_dict.get('hpx_order_ccube'),
hpx_order_expcube=config_dict.get('hpx_order_expcube'),
scratch=scratch,
dry_run=dry_run)

self._set_link('diffuse-comp', DiffuseCompChain,
comp=comp, data=data,
library=library,
make_xml=config_dict.get('make_diffuse_comp_xml', False),
outdir=config_dict.get('merged_gasmap_dir', 'merged_gasmap'),
dry_run=dry_run)

self._set_link('catalog-comp', CatalogCompChain,
comp=comp, data=data,
library=library,
make_xml=config_dict.get('make_catalog_comp_xml', False),
nsrc=config_dict.get('catalog_nsrc', 500),
dry_run=dry_run)

self._set_link('assemble-model', AssembleModelChain,
comp=comp, data=data,
library=library,
models=models,
hpx_order=config_dict.get('hpx_order_fitting'),
dry_run=dry_run)


def register_classes():
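A recurring addition in this file is `__doc__ += Link.construct_docstring(default_options)`, which appends an auto-generated options section to each chain's class docstring. A simplified sketch of the idiom (the real `construct_docstring` lives in `fermipy.jobs.link` and will differ in detail):

# Simplified stand-in for Link.construct_docstring(): render each
# (default, help, type) option tuple as one docstring line.
def construct_docstring(options):
    lines = ['', 'Parameters', '----------']
    for name, (default, helpstr, otype) in options.items():
        lines.append('%s : %s (%s, default %s)' % (name, helpstr, otype.__name__, default))
    return '\n'.join(lines)

class MyChain(object):
    """Chain to do something."""
    default_options = dict(dry_run=(False, 'Print commands but do not run them', bool))
    # augmented assignment works here because the docstring literal has
    # already bound __doc__ in the class namespace
    __doc__ += construct_docstring(default_options)

print(MyChain.__doc__)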
20 changes: 10 additions & 10 deletions fermipy/diffuse/gt_assemble_model.py
@@ -283,16 +283,16 @@ def _map_arguments(self, input_dict):
hpx_order = input_dict.get('hpx_order_fitting')
dry_run = input_dict.get('dry_run', False)

self._load_link_args('init-model', InitModel,
comp=comp, data=data,
library=library,
models=models,
hpx_order=hpx_order,
dry_run=dry_run)

self._load_link_args('assemble-model', AssembleModel_SG,
comp=comp, data=data,
models=models)
self._set_link('init-model', InitModel,
comp=comp, data=data,
library=library,
models=models,
hpx_order=hpx_order,
dry_run=dry_run)

self._set_link('assemble-model', AssembleModel_SG,
comp=comp, data=data,
models=models)


def register_classes():
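The common thread across these files is the rename from `_load_link_args` to `_set_link`: `_map_arguments` now declares each sub-link of a chain together with the keyword arguments it should receive. A minimal sketch of that wiring pattern, using hypothetical stand-in classes (the real `Chain` and `Link` live in `fermipy.jobs`):

# Hypothetical stand-ins; only the _set_link wiring pattern is real.
class Link(object):
    def __init__(self, **kwargs):
        self.args = kwargs

class Chain(object):
    def __init__(self):
        self._links = {}

    def _set_link(self, linkname, cls, **kwargs):
        # create or reconfigure the named sub-link with the mapped arguments
        self._links[linkname] = cls(**kwargs)

class MyChain(Chain):
    def _map_arguments(self, args):
        dry_run = args.get('dry_run', False)
        self._set_link('init-model', Link, dry_run=dry_run)
        self._set_link('assemble-model', Link, dry_run=dry_run)

chain = MyChain()
chain._map_arguments(dict(dry_run=True))
print(sorted(chain._links))  # ['assemble-model', 'init-model']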
