Skip to content

Commit

Permalink
Merge pull request #259 from sot/modernize
Browse files Browse the repository at this point in the history
Modernize
  • Loading branch information
taldcroft authored Mar 28, 2024
2 parents 35a92b1 + 459a61b commit aaa9dbd
Show file tree
Hide file tree
Showing 23 changed files with 225 additions and 181 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.5
rev: v0.3.3
hooks:
# Run the linter.
- id: ruff
Expand Down
8 changes: 3 additions & 5 deletions cheta/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,9 @@
import collections
import functools
from heapq import nsmallest
from itertools import filterfalse
from operator import itemgetter

import six
from six.moves import filterfalse


class Counter(dict):
"Mapping where default values are zero"
Expand All @@ -16,7 +14,7 @@ def __missing__(self, key):
return 0


# TODO: replace with std_library version of this in Py3.6 (issue #173)
# Note: this is not equivalent to functools.lru_cache, see #173.


def lru_cache(maxsize=30):
Expand Down Expand Up @@ -131,7 +129,7 @@ def wrapper(*args, **kwds):
# purge least frequently used cache entry
if len(cache) > maxsize:
for key, _ in nsmallest(
maxsize // 10, six.iteritems(use_count), key=itemgetter(1)
maxsize // 10, use_count.items(), key=itemgetter(1)
):
del cache[key], use_count[key]

Expand Down
2 changes: 0 additions & 2 deletions cheta/converters.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import, division, print_function

import logging
import sys
Expand All @@ -10,7 +9,6 @@
import Ska.Numpy
import Ska.tdb
from Chandra.Time import DateTime
from six.moves import zip

from . import units

Expand Down
7 changes: 6 additions & 1 deletion cheta/derived/comps.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@

import functools
import re
import warnings

import astropy.table as tbl
import numpy as np
Expand Down Expand Up @@ -428,7 +429,11 @@ def get_msid_attrs(self, tstart: float, tstop: float, msid: str, msid_args: tupl
q4 = np.sqrt((1.0 - q1**2 - q2**2 - q3**2).clip(0.0))

q = np.array([q1, q2, q3, q4]).transpose()
quat = Quat(q=normalize(q))
with warnings.catch_warnings():
warnings.filterwarnings(
"ignore", message="Normalizing quaternion with zero norm"
)
quat = Quat(q=normalize(q))
bads = np.zeros_like(q1, dtype=bool)
for msid in msids:
bads |= dat[msid].bads
Expand Down
126 changes: 63 additions & 63 deletions cheta/derived/mups_valve.py
Original file line number Diff line number Diff line change
@@ -1,67 +1,67 @@
"""
Fetch clean MUPS valve temperature telemetry
This makes use of the temperature correction code provided by Scott Blanchard (to correct
for a resistor dropout in the thermistor data) and xija thermal model developed by
Matt Dahmer.
The basic cleaning algorithm is very simple:
- Fetch raw telemetry from the cheta archive.
- Compute a xija thermal model prediction for the same timespan (actually starting a few
days in advance to burn out uncertainty in the pseudo-node value).
- Accept either raw telemetry or corrected data which are within a tolerance (5 degF) of
the model.
- In the gaps where the model diverges from both raw and corrected temperatures,
"repropagate" the model starting from the last accepted temperature value.
This effectively takes out much of the systematic model error, which can be up to
15 degF after a transition to a new attitude.
- In some cases this allows recovery of additional data, while in others the data are
not recoverable: either due to partial disconnect of the parallel resistor or full
disconnects where the output voltage exceeds 5.12 V.
The output of this function is a `fetch.Msid` object with some bonus attributes,
documented below. In particular the output cleaned data are labeled so one knows
exactly where each data point came from.
The function is fast, and one can get 5 years of cleaned telemetry in a few seconds
on a modern laptop with SSD drive.
This cleaning technique recovers on average about 90% of data for PM2THV1T. Since
2015, about 60% of telemetry is good (no dropout) while 30% is in a recoverable
fully-dropped state (and 10% is not recoverable).
```
def fetch_clean_msid(msid, start, stop=None, dt_thresh=5.0, median=7, model_spec=None,
version=None):
Fetch a cleaned version of telemetry for ``msid``.
If not supplied the model spec will come from
``xija.get_model_spec.get_xija_model_spec(msid, version=version)``
(which uses ``$SKA/data/chandra_models/chandra_models/xija/mups_valve/{msid}_spec.json``).
This function returns a `fetch.Msid` object like a normal fetch but with extra attributes:
- vals: cleaned telemetry (either original or corrected telemetry, or xija model prediction)
- source: label for each vals data point
- 0: unrecoverable, so use xija model value
- 1: original telemetry
- 2: corrected telemetry
- vals_raw: raw (uncleaned) telemetry
- vals_nan: cleaned telem but with np.nan at points where data are unrecoverable (this is
for plotting)
- vals_corr: telemetry with the MUPS correction applied
- vals_model: xija model prediction
:param start: start time
:param stop: stop time (default=NOW)
:param dt_thresh: tolerance for matching model to data in degF (default=5 degF)
:param median: length of median filter (default=7, use 0 to disable)
:param model_spec: file name or URL containing relevant xija model spec
:param version: version of chandra_models repo (tag, branch, or commit)
:returns: fetch.Msid object
```
Fetch clean MUPS valve temperature telemetry
This makes use of the temperature correction code provided by Scott Blanchard (to correct
for a resistor dropout in the thermistor data) and xija thermal model developed by
Matt Dahmer.
The basic cleaning algorithm is very simple:
- Fetch raw telemetry from the cheta archive.
- Compute a xija thermal model prediction for the same timespan (actually starting a few
days in advance to burn out uncertainty in the pseudo-node value).
- Accept either raw telemetry or corrected data which are within a tolerance (5 degF) of
the model.
- In the gaps where the model diverges from both raw and corrected temperatures,
"repropagate" the model starting from the last accepted temperature value.
This effectively takes out much of the systematic model error, which can be up to
15 degF after a transition to a new attitude.
- In some cases this allows recovery of additional data, while in others the data are
not recoverable: either due to partial disconnect of the parallel resistor or full
disconnects where the output voltage exceeds 5.12 V.
The output of this function is a `fetch.Msid` object with some bonus attributes,
documented below. In particular the output cleaned data are labeled so one knows
exactly where each data point came from.
The function is fast, and one can get 5 years of cleaned telemetry in a few seconds
on a modern laptop with SSD drive.
This cleaning technique recovers on average about 90% of data for PM2THV1T. Since
2015, about 60% of telemetry is good (no dropout) while 30% is in a recoverable
fully-dropped state (and 10% is not recoverable).
```
def fetch_clean_msid(msid, start, stop=None, dt_thresh=5.0, median=7, model_spec=None,
version=None):
Fetch a cleaned version of telemetry for ``msid``.
If not supplied the model spec will come from
``xija.get_model_spec.get_xija_model_spec(msid, version=version)``
(which uses ``$SKA/data/chandra_models/chandra_models/xija/mups_valve/{msid}_spec.json``).
This function returns a `fetch.Msid` object like a normal fetch but with extra attributes:
- vals: cleaned telemetry (either original or corrected telemetry, or xija model prediction)
- source: label for each vals data point
- 0: unrecoverable, so use xija model value
- 1: original telemetry
- 2: corrected telemetry
- vals_raw: raw (uncleaned) telemetry
- vals_nan: cleaned telem but with np.nan at points where data are unrecoverable (this is
for plotting)
- vals_corr: telemetry with the MUPS correction applied
- vals_model: xija model prediction
:param start: start time
:param stop: stop time (default=NOW)
:param dt_thresh: tolerance for matching model to data in degF (default=5 degF)
:param median: length of median filter (default=7, use 0 to disable)
:param model_spec: file name or URL containing relevant xija model spec
:param version: version of chandra_models repo (tag, branch, or commit)
:returns: fetch.Msid object
```
"""

import os
Expand Down
2 changes: 1 addition & 1 deletion cheta/derived/orbit.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import, division, print_function

"""
Orbital elements based on the position and velocity of Chandra at each 5 minute predictive
Expand Down Expand Up @@ -32,6 +31,7 @@
The relevant equations were taken from http://www.castor2.ca/05_OD/01_Gauss/14_Kepler/index.html.
"""

import numpy as np
from Chandra.Time import DateTime

Expand Down
6 changes: 4 additions & 2 deletions cheta/fetch.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
"""
Fetch values from the Ska engineering telemetry archive.
"""

import collections
import contextlib
import fnmatch
Expand Down Expand Up @@ -294,7 +295,8 @@ def load_msid_names(all_msid_names_files):
all_colnames = dict()
for k, msid_names_file in all_msid_names_files.items():
try:
all_colnames[k] = pickle.load(open(os.path.join(*msid_names_file), "rb"))
with open(os.path.join(*msid_names_file), "rb") as fh:
all_colnames[k] = pickle.load(fh)
except IOError:
pass
return all_colnames
Expand Down Expand Up @@ -718,7 +720,7 @@ def _get_comp_data(self, comp_cls):
self.colnames = [
attr
for attr, val in attrs.items()
if (isinstance(val, np.ndarray) and len(val) == len(attrs["times"]))
if (hasattr(val, "shape") and len(val) == len(attrs["times"]))
]

# Apply attributes to self
Expand Down
1 change: 1 addition & 0 deletions cheta/file_defs.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
Arch files are the CXC archive files containing a short interval of telemetry for
all MSIDs in the same content-type group (e.g. ACIS2ENG).
"""

import os

SKA = os.environ.get("SKA") or "/proj/sot/ska"
Expand Down
5 changes: 1 addition & 4 deletions cheta/get_telem.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
Arguments
=========
"""
from __future__ import absolute_import, division, print_function

import argparse
import ast
Expand All @@ -29,9 +28,7 @@
from itertools import count

import numpy as np
import six
from Chandra.Time import DateTime
from six.moves import zip

from . import fetch, utils

Expand Down Expand Up @@ -188,7 +185,7 @@ def get_telem(
start = stop - 30 if start is None else DateTime(start)
stat = None if sampling == "full" else sampling
filter_bad = interpolate_dt is None
if isinstance(msids, six.string_types):
if isinstance(msids, str):
msids = [msids]

logger.info(
Expand Down
1 change: 0 additions & 1 deletion cheta/plot.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import, division, print_function

import matplotlib.pyplot as plt
import numpy as np
Expand Down
1 change: 0 additions & 1 deletion cheta/remote_access.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
NOTE: see test_remote_access.py for useful information about doing functional
testing of this code.
"""
from __future__ import absolute_import, division, print_function

import getpass
import os
Expand Down
24 changes: 24 additions & 0 deletions cheta/tests/test_comps.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

"""Test that computed MSIDs work as expected."""

import warnings

import astropy.units as u
import numpy as np
import pytest
Expand Down Expand Up @@ -269,6 +271,28 @@ def test_quat_comp(msid, maude, offset):
assert isinstance(datq.vals, Quat)


def test_quat_comp_bad_times():
    """Test bad time data on 2024:064. All four quats have zero value and are bad.

    The bad sample times are ['2024:064:09:27:02.652' '2024:064:09:27:03.677'].
    (Docstring previously said 2024:264; the data interval below is day 064.)
    """
    start = "2024:064:09:26:00"
    stop = "2024:064:09:28:00"
    # Assert no warnings despite quat with zero normalization. The zero-norm samples
    # are marked bad, so normalizing them must not emit the zero-norm warning.
    with warnings.catch_warnings():
        warnings.simplefilter("error")  # Turn any warning into an error
        dat = fetch_eng.MSID("quat_aoattqt", start, stop)

    # Exactly the two zero-norm samples are flagged bad; vals/times stay aligned.
    assert np.count_nonzero(dat.bads) == 2
    assert len(dat.vals) == len(dat.times)

    # Msid (vs MSID) filters bad samples: bads becomes None and the two bad
    # samples are dropped from vals.
    dat2 = fetch_eng.Msid("quat_aoattqt", start, stop)
    assert dat2.bads is None  # After Msid filtering
    assert len(dat2.vals) == len(dat2.times)
    assert len(dat2.vals) == len(dat.vals) - 2


def test_pitch_comp():
"""Test pitch_comp during a time with NPNT, NMAN, NSUN and Safe Sun"""
start = "2022:293"
Expand Down
1 change: 0 additions & 1 deletion cheta/tests/test_data_source.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import, division, print_function

import numpy as np
import pytest
Expand Down
1 change: 1 addition & 0 deletions cheta/tests/test_remote_access.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@
installation root directory (`python -c 'import sys; print(sys.prefix)'`) as
`ska_remote_access.json`.
"""

import os
import shutil
from pathlib import Path
Expand Down
7 changes: 5 additions & 2 deletions cheta/units.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
('VDC', 'V'),
('W', 'W')}
"""

import logging
import os
import pickle
Expand All @@ -60,7 +61,8 @@ def emit(self, record):

units = {}
units["system"] = "cxc"
units["cxc"] = pickle.load(open(os.path.join(module_dir, "units_cxc.pkl"), "rb"))
with open(os.path.join(module_dir, "units_cxc.pkl"), "rb") as fh:
units["cxc"] = pickle.load(fh)


# Equivalent unit descriptors used in 'eng' and 'cxc' units
Expand Down Expand Up @@ -221,7 +223,8 @@ def load_units(unit_system):

if unit_system not in units:
filename = os.path.join(module_dir, "units_{0}.pkl".format(unit_system))
units[unit_system] = pickle.load(open(filename, "rb"))
with open(filename, "rb") as fh:
units[unit_system] = pickle.load(fh)


def set_units(unit_system):
Expand Down
1 change: 0 additions & 1 deletion cheta/update_server_sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@
sync repository to capture newly-available data since the last bundle.
"""


import argparse
import gzip
import pickle
Expand Down
Loading

0 comments on commit aaa9dbd

Please sign in to comment.