[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Mar 10, 2023
1 parent a9a2edf commit f5b7cf2
Showing 17 changed files with 61 additions and 71 deletions.
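
Five fix patterns recur across the files below: comparisons against singletons rewritten from == None / == True / == False to is / is not form, not x is y rewritten as x is not y, unused imports and unused local assignments removed, redundant trailing semicolons dropped, and an assigned lambda rewritten as a def. The singleton rewrites matter because == dispatches to a class's __eq__, which can be overloaded, while is tests object identity. A minimal sketch of the distinction (the class and variable names here are hypothetical, not from this commit):

    class AlwaysEqual:
        # Hypothetical class whose __eq__ claims equality with everything
        def __eq__(self, other):
            return True

    val = AlwaysEqual()
    print(val == None)   # True -- the __eq__ overload hijacks the equality test
    print(val is None)   # False -- identity comparison cannot be fooled
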
2 changes: 1 addition & 1 deletion abacusnbody/data/asdf.py
@@ -65,7 +65,7 @@ def compress(self, data, **kwargs):
             shuffle = blosc.SHUFFLE
         elif shuffle == 'bitshuffle':
             shuffle = blosc.BITSHUFFLE
-        elif shuffle == None:
+        elif shuffle is None:
             shuffle = blosc.NOSHUFFLE
         else:
             raise ValueError(shuffle)
42 changes: 21 additions & 21 deletions abacusnbody/data/compaso_halo_catalog.py
@@ -17,7 +17,7 @@
 from glob import glob
 from os.path import abspath, basename, dirname, isdir, isfile
 from os.path import join as pjoin
-from os.path import normpath, samefile
+from os.path import samefile
 from pathlib import PurePath

 # Stop astropy from trying to download time data; nodes on some clusters are not allowed to access the internet directly
@@ -167,7 +167,7 @@ def __init__(self, path, cleaned=True, subsamples=False, convert_units=True, unp
         # said `cleaned=True` or because this is a halo light cone catalog, which is already cleaned
         self.cleaned = cleaned

-        if halo_lc == None:
+        if halo_lc is None:
             halo_lc = self._is_path_halo_lc(path)
             if verbose and halo_lc:
                 print('Detected halo light cone catalog.')
@@ -388,14 +388,14 @@ def _setup_load_subsamples(self, load_subsamples):
         Will be returned as lists of strings in `load_AB` and `load_pidrv`.
         `unpack_subsamples` is for pipelining, to keep things in rvint.
         '''
-        if load_subsamples == False:
+        if load_subsamples is False:
            # stub
            load_AB = []
            load_pidrv = []
            unpack_subsamples = True
         else:
             # If user has not specified which subsamples, then assume user wants to load everything
-            if load_subsamples == True:
+            if load_subsamples is True:
                 load_subsamples = dict(A=True, B=True, rv=True, pid=True)

             if type(load_subsamples) == dict:
@@ -1221,7 +1221,7 @@ def _unpack_euler16(bin_this):
     bin_this = bin_this - cap*(EULER_TBIN*EULER_TBIN)

     it = (np.floor(np.sqrt(bin_this))).astype(int)
-    its = np.sum(np.isnan(it))
+    np.sum(np.isnan(it))


     ir = bin_this - it*it
@@ -1238,23 +1238,23 @@ def _unpack_euler16(bin_this):
     # and zz=1
     norm = 1.0/np.sqrt(1.0+xx*xx+yy*yy)
     zz = norm
-    yy *= norm; xx *= norm; # These are now a unit vector
+    yy *= norm; xx *= norm # These are now a unit vector

     # TODO: legacy code, rewrite
-    major[cap==0,0] = zz[cap==0]; major[cap==0,1] = yy[cap==0]; major[cap==0,2] = xx[cap==0];
-    major[cap==1,0] = zz[cap==1]; major[cap==1,1] =-yy[cap==1]; major[cap==1,2] = xx[cap==1];
-    major[cap==2,0] = zz[cap==2]; major[cap==2,1] = xx[cap==2]; major[cap==2,2] = yy[cap==2];
-    major[cap==3,0] = zz[cap==3]; major[cap==3,1] = xx[cap==3]; major[cap==3,2] =-yy[cap==3];
+    major[cap==0,0] = zz[cap==0]; major[cap==0,1] = yy[cap==0]; major[cap==0,2] = xx[cap==0]
+    major[cap==1,0] = zz[cap==1]; major[cap==1,1] =-yy[cap==1]; major[cap==1,2] = xx[cap==1]
+    major[cap==2,0] = zz[cap==2]; major[cap==2,1] = xx[cap==2]; major[cap==2,2] = yy[cap==2]
+    major[cap==3,0] = zz[cap==3]; major[cap==3,1] = xx[cap==3]; major[cap==3,2] =-yy[cap==3]

-    major[cap==4,1] = zz[cap==4]; major[cap==4,2] = yy[cap==4]; major[cap==4,0] = xx[cap==4];
-    major[cap==5,1] = zz[cap==5]; major[cap==5,2] =-yy[cap==5]; major[cap==5,0] = xx[cap==5];
-    major[cap==6,1] = zz[cap==6]; major[cap==6,2] = xx[cap==6]; major[cap==6,0] = yy[cap==6];
-    major[cap==7,1] = zz[cap==7]; major[cap==7,2] = xx[cap==7]; major[cap==7,0] =-yy[cap==7];
+    major[cap==4,1] = zz[cap==4]; major[cap==4,2] = yy[cap==4]; major[cap==4,0] = xx[cap==4]
+    major[cap==5,1] = zz[cap==5]; major[cap==5,2] =-yy[cap==5]; major[cap==5,0] = xx[cap==5]
+    major[cap==6,1] = zz[cap==6]; major[cap==6,2] = xx[cap==6]; major[cap==6,0] = yy[cap==6]
+    major[cap==7,1] = zz[cap==7]; major[cap==7,2] = xx[cap==7]; major[cap==7,0] =-yy[cap==7]

-    major[cap==8,2] = zz[cap==8]; major[cap==8,0] = yy[cap==8]; major[cap==8,1] = xx[cap==8];
-    major[cap==9,2] = zz[cap==9]; major[cap==9,0] =-yy[cap==9]; major[cap==9,1] = xx[cap==9];
-    major[cap==10,2] = zz[cap==10]; major[cap==10,0] = xx[cap==10]; major[cap==10,1] = yy[cap==10];
-    major[cap==11,2] = zz[cap==11]; major[cap==11,0] = xx[cap==11]; major[cap==11,1] =-yy[cap==11];
+    major[cap==8,2] = zz[cap==8]; major[cap==8,0] = yy[cap==8]; major[cap==8,1] = xx[cap==8]
+    major[cap==9,2] = zz[cap==9]; major[cap==9,0] =-yy[cap==9]; major[cap==9,1] = xx[cap==9]
+    major[cap==10,2] = zz[cap==10]; major[cap==10,0] = xx[cap==10]; major[cap==10,1] = yy[cap==10]
+    major[cap==11,2] = zz[cap==11]; major[cap==11,0] = xx[cap==11]; major[cap==11,1] =-yy[cap==11]

     # Next, we can get the minor axis
     az = (iaz+0.5)*(1.0/EULER_ABIN)*np.pi
@@ -1265,13 +1265,13 @@ def _unpack_euler16(bin_this):
     # are perpendicular.

     eq2 = (cap//4) == 2
-    minor[eq2,0] = xx[eq2]; minor[eq2,1] = yy[eq2];
+    minor[eq2,0] = xx[eq2]; minor[eq2,1] = yy[eq2]
     minor[eq2,2] = (minor[eq2,0]*major[eq2,0]+minor[eq2,1]*major[eq2,1])/(-major[eq2,2])
     eq4 = (cap//4) == 0
-    minor[eq4,1] = xx[eq4]; minor[eq4,2] = yy[eq4];
+    minor[eq4,1] = xx[eq4]; minor[eq4,2] = yy[eq4]
     minor[eq4,0] = (minor[eq4,1]*major[eq4,1]+minor[eq4,2]*major[eq4,2])/(-major[eq4,0])
     eq1 = (cap//4) == 1
-    minor[eq1,2] = xx[eq1]; minor[eq1,0] = yy[eq1];
+    minor[eq1,2] = xx[eq1]; minor[eq1,0] = yy[eq1]
     minor[eq1,1] = (minor[eq1,2]*major[eq1,2]+minor[eq1,0]*major[eq1,0])/(-major[eq1,1])
     minor *= (1./np.linalg.norm(minor,axis=1).reshape(N,1))
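
Note that the mechanical rewrite to load_subsamples is False / is True above preserves the original semantics where a plain truthiness test would not: load_subsamples can also arrive as a dict (see the type(load_subsamples) == dict branch), and an empty dict is falsy. A small sketch of the difference, as a standalone hypothetical:

    load_subsamples = {}                # caller passed an empty dict, not False
    print(load_subsamples is False)     # False -- identity: only the False singleton matches
    print(not load_subsamples)          # True  -- truthiness: an empty dict counts as false
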
1 change: 0 additions & 1 deletion abacusnbody/data/pipe_asdf.py
@@ -83,7 +83,6 @@
 import gc
 import sys
 from os.path import isfile
-from os.path import join as pjoin
 from timeit import default_timer as timer

 import asdf
12 changes: 6 additions & 6 deletions abacusnbody/hod/GRAND_HOD.py
@@ -6,7 +6,7 @@
 import numpy as np
 from astropy.io import ascii
 from astropy.table import Table
-from numba import jit, njit, types
+from numba import njit, types
 from numba.typed import Dict

 # import yaml
@@ -171,7 +171,7 @@ def gen_cent(pos, vel, mass, ids, multis, randoms, vdev, deltac, fenv,
             ELG_marker += N_cen_ELG_v1(mass[i], pmax_E, Q_E, logM_cut_E_temp, sigma_E, gamma_E) * ic_E * multis[i]
         QSO_marker = ELG_marker
         if want_QSO:
-            logM_cut_Q_temp = logM_cut_Q + Ac_Q * deltac[i] + Bc_Q * fenv[i]
+            logM_cut_Q + Ac_Q * deltac[i] + Bc_Q * fenv[i]
             QSO_marker += N_cen_QSO(mass[i], logM_cut_Q, sigma_Q) * ic_Q * multis[i]

         if randoms[i] <= LRG_marker:
@@ -291,7 +291,7 @@ def gen_cent(pos, vel, mass, ids, multis, randoms, vdev, deltac, fenv,
             qso_z[j3] = pos[i,2]
             qso_vz[j3] = vel[i,2] + alpha_c_Q * vdev[i] # velocity bias
             # rsd only applies to the z direction
-            if rsd and origin != None:
+            if rsd and origin is not None:
                 nx = qso_x[j3] - origin[0]
                 ny = qso_y[j3] - origin[1]
                 nz = qso_z[j3] - origin[2]
@@ -554,7 +554,7 @@ def gen_sats(ppos, pvel, hvel, hmass, hid, weights, randoms, hdeltac, hfenv,
             qso_vy[j3] = hvel[i, 1] + alpha_s_Q * (pvel[i, 1] - hvel[i, 1]) # velocity bias
             qso_z[j3] = ppos[i, 2]
             qso_vz[j3] = hvel[i, 2] + alpha_s_Q * (pvel[i, 2] - hvel[i, 2]) # velocity bias
-            if rsd and origin != None:
+            if rsd and origin is not None:
                 nx = qso_x[j3] - origin[0]
                 ny = qso_y[j3] - origin[1]
                 nz = qso_z[j3] - origin[2]
@@ -907,7 +907,7 @@ def gen_gal_cat(halo_data, particle_data, tracers, params, Nthread = 16,
     """

-    if not type(rsd) is bool:
+    if type(rsd) is not bool:
         raise ValueError("Error: rsd has to be a boolean")

     # find the halos, populate them with galaxies and write them to files
@@ -938,7 +938,7 @@ def gen_gal_cat(halo_data, particle_data, tracers, params, Nthread = 16,
         os.makedirs(outdir, exist_ok = True)

         # save to file
-        outdict = HOD_dict[tracer].pop('Ncent', None)
+        HOD_dict[tracer].pop('Ncent', None)
         table = Table(HOD_dict[tracer], meta = {'Ncent': Ncent, 'Gal_type': tracer, **tracers[tracer]})
         if params['chunk'] == -1:
             ascii.write(table, outdir / (f"{tracer}s.dat"), overwrite = True, format = 'ecsv')
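
The gen_gal_cat change above, not type(rsd) is bool becoming type(rsd) is not bool, is purely a readability fix (flake8 reports the former as E714); both parse to the same comparison. Since bool cannot be subclassed, type(rsd) is not bool is also equivalent to not isinstance(rsd, bool), the more idiomatic spelling. A hypothetical check of all three forms:

    rsd = 1  # an int, not a bool
    print(not type(rsd) is bool)      # True -- parsed as not (type(rsd) is bool)
    print(type(rsd) is not bool)      # True -- same result, reads as a single operator
    print(not isinstance(rsd, bool))  # True -- idiomatic alternative, equivalent for bool
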
13 changes: 5 additions & 8 deletions abacusnbody/hod/prepare_sim.py
@@ -18,10 +18,7 @@
 import numba
 import numpy as np
 import yaml
-from astropy.table import Table
-from numba import jit, njit, types
-from scipy.interpolate import NearestNDInterpolator
-from scipy.ndimage import gaussian_filter
+from numba import njit
 from scipy.spatial import cKDTree

 from abacusnbody.data.compaso_halo_catalog import CompaSOHaloCatalog
@@ -474,8 +471,8 @@ def prepare_slab(i, savedir, simdir, simname, z_mock, tracer_flags, MT, want_ran
     ranksr_parts = np.full(len_old, -1.0)
     ranksp_parts = np.full(len_old, -1.0)
     ranksc_parts = np.full(len_old, -1.0)
-    pos_parts = np.full((len_old, 3), -1.0)
-    vel_parts = np.full((len_old, 3), -1.0)
+    np.full((len_old, 3), -1.0)
+    np.full((len_old, 3), -1.0)
     hvel_parts = np.full((len_old, 3), -1.0)
     Mh_parts = np.full(len_old, -1.0)
     Np_parts = np.full(len_old, -1.0)
@@ -606,7 +603,7 @@ def prepare_slab(i, savedir, simdir, simname, z_mock, tracer_flags, MT, want_ran
     if os.path.exists(outfilename_halos):
         os.remove(outfilename_halos)
     newfile = h5py.File(outfilename_halos, 'w')
-    dataset = newfile.create_dataset('halos', data = halos[mask_halos])
+    newfile.create_dataset('halos', data = halos[mask_halos])
     newfile.close()

     # output the new particle file
@@ -636,7 +633,7 @@ def prepare_slab(i, savedir, simdir, simname, z_mock, tracer_flags, MT, want_ran
     if os.path.exists(outfilename_particles):
         os.remove(outfilename_particles)
     newfile = h5py.File(outfilename_particles, 'w')
-    dataset = newfile.create_dataset('particles', data = parts)
+    newfile.create_dataset('particles', data = parts)
     newfile.close()

     print("pre process particle number ", len_old, " post process particle number ", len(parts))
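
One caveat visible in this file: the unused-variable fix removes only the binding and leaves the right-hand side behind as a bare expression, so the two np.full((len_old, 3), -1.0) lines still allocate arrays whose results are immediately discarded (likewise np.sum(np.isnan(it)) earlier in compaso_halo_catalog.py). A follow-up pass could delete such side-effect-free expressions outright; a hypothetical standalone illustration:

    import numpy as np

    len_old = 1000
    np.full((len_old, 3), -1.0)  # post-fix: still allocates a (1000, 3) array, then discards it
    # Deleting the line entirely would skip the allocation with no change in behavior.
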
2 changes: 1 addition & 1 deletion abacusnbody/metadata/abacussummit.py
@@ -50,7 +50,7 @@ def get_meta(simname, redshift=None):
     if 'CLASS_power_spectrum' in metadata[simname]:
         res['CLASS_power_spectrum'] = metadata[simname]['CLASS_power_spectrum']

-    if redshift != None:
+    if redshift is not None:
         if type(redshift) != str:
             redshift = f'z{redshift:.3f}'
         if not redshift.startswith('z'):
3 changes: 1 addition & 2 deletions docs/tutorials/light_cones/run_lc_hod.py
@@ -9,7 +9,6 @@

 import argparse

-import numpy as np
 import yaml

 from abacusnbody.hod.abacus_hod import AbacusHOD
@@ -31,7 +30,7 @@ def main(path2config):

     # run the HODs (note: the first time you call the function run_hod, the script takes a bit to compile)
     newBall = AbacusHOD(sim_params, HOD_params, clustering_params)
-    mock_dict = newBall.run_hod(tracers=newBall.tracers, want_rsd=want_rsd, write_to_disk=write_to_disk, Nthread=16)
+    newBall.run_hod(tracers=newBall.tracers, want_rsd=want_rsd, write_to_disk=write_to_disk, Nthread=16)

 class ArgParseFormatter(argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter):
     pass
1 change: 0 additions & 1 deletion docs/tutorials/light_cones/util.py
@@ -1,4 +1,3 @@
-import numpy as np
 from numba import jit

3 changes: 2 additions & 1 deletion scripts/emulator/generate_cfs/generate_cf.py
@@ -24,7 +24,8 @@
 DEFAULT_NTHREAD = len(os.sched_getaffinity(0))  # guess based on affinity mask
 DEFAULT_OUTDIR = '.'

-log = lambda *args,**kwargs: print(*args,**kwargs,flush=True)
+def log(*args, **kwargs):
+    return print(*args, **kwargs, flush=True)

 def prepare_cat(halo_cat_path, ndens):
     '''Load and downsample the cat
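
The lambda-to-def rewrite above corresponds to the pycodestyle rule E731 ("do not assign a lambda expression, use a def"): a def gives the function a real __name__ for tracebacks and profiling, whereas an assigned lambda only ever reports '<lambda>'. A short sketch with hypothetical names:

    log_lambda = lambda *args, **kwargs: print(*args, **kwargs, flush=True)

    def log_def(*args, **kwargs):
        return print(*args, **kwargs, flush=True)

    print(log_lambda.__name__)  # '<lambda>' -- opaque in tracebacks and profiles
    print(log_def.__name__)     # 'log_def'
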
4 changes: 1 addition & 3 deletions scripts/hod/plot_chains.py
@@ -1,7 +1,6 @@
 import argparse
 import os

-import getdist
 import matplotlib.pyplot as plt
 import numpy as np
 import yaml
@@ -25,11 +24,10 @@ def main(path2config):
     ch_params = config['ch_config_params']

     # parameters
-    n_iter = ch_params['sampleIterations']
+    ch_params['sampleIterations']
     w_rat = ch_params['walkersRatio']
     b_iter = ch_params['burninIterations']
     par_names = fit_params.keys()
-    lab_names = par_names
     n_par = len(par_names)

     # what are we plotting
2 changes: 1 addition & 1 deletion scripts/hod/run_emcee.py
@@ -46,7 +46,7 @@ def persistValues(self, posFile, probFile, pos, prob):

         probFile.write("\n".join([str(p) for p in prob]))
         probFile.write("\n")
-        probFile.flush();
+        probFile.flush()

     def close(self):
         self.samplesFile.close()
4 changes: 2 additions & 2 deletions scripts/hod/run_hod.py
@@ -41,7 +41,7 @@ def main(path2config):
     mock_dict = newBall.run_hod(newBall.tracers, want_rsd, write_to_disk = False, Nthread = 16)
     # mock_dict = newBall.gal_reader()
     start = time.time()
-    xirppi = newBall.compute_xirppi(mock_dict, rpbins, pimax, pi_bin_size, Nthread = 32)
+    newBall.compute_xirppi(mock_dict, rpbins, pimax, pi_bin_size, Nthread = 32)
     print("Done xi, total time ", time.time() - start)
     # print(xirppi)
     # wp = newBall.compute_wp(mock_dict, rpbins, pimax, pi_bin_size)
@@ -61,7 +61,7 @@ def main(path2config):
     start = time.time()
     # ngal_dict = newBall.compute_ngal()
     # print("Done ngal, took time ", time.time() - start, ngal_dict)
-    xirppi = newBall.compute_xirppi(mock_dict, rpbins, pimax, pi_bin_size, Nthread = 32)
+    newBall.compute_xirppi(mock_dict, rpbins, pimax, pi_bin_size, Nthread = 32)
     deltat = time.time() - start
     print("Done xi, total time ", deltat)
     meantime += deltat
10 changes: 5 additions & 5 deletions scripts/hod/run_lc_hod.py
@@ -33,13 +33,13 @@ def main(path2config):
     want_rsd = HOD_params['want_rsd']
     write_to_disk = HOD_params['write_to_disk']
     bin_params = clustering_params['bin_params']
-    rpbins = np.logspace(bin_params['logmin'], bin_params['logmax'], bin_params['nbins'] + 1)
-    pimax = clustering_params['pimax']
-    pi_bin_size = clustering_params['pi_bin_size']
+    np.logspace(bin_params['logmin'], bin_params['logmax'], bin_params['nbins'] + 1)
+    clustering_params['pimax']
+    clustering_params['pi_bin_size']

     # run the HODs
     newBall = AbacusHOD(sim_params, HOD_params, clustering_params)
-    mock_dict = newBall.run_hod(tracers=newBall.tracers, want_rsd=want_rsd, write_to_disk=write_to_disk, Nthread=16)
+    newBall.run_hod(tracers=newBall.tracers, want_rsd=want_rsd, write_to_disk=write_to_disk, Nthread=16)

     # can change some parameter and run again to time
     zs = [0.1]
@@ -48,7 +48,7 @@ def main(path2config):
         sim_params['z_mock'] = zs[i]
         newBall = AbacusHOD(sim_params, HOD_params, clustering_params)
         start = time.time()
-        mock_dict = newBall.run_hod(tracers=newBall.tracers, want_rsd=want_rsd, write_to_disk=False, Nthread=16)
+        newBall.run_hod(tracers=newBall.tracers, want_rsd=want_rsd, write_to_disk=False, Nthread=16)
         print("Done hod, took time ", time.time() - start)
5 changes: 2 additions & 3 deletions scripts/hod/run_nested.py
@@ -7,7 +7,6 @@
 import numpy as np
 import yaml
 from dynesty import NestedSampler
-from likelihood import PowerData
 from scipy import stats

 from abacusnbody.hod.abacus_hod import AbacusHOD
@@ -27,7 +26,7 @@ def lnprob(p, param_mapping, param_tracer, Data, Ball):
     # pass them to the mock dictionary
     mock_dict = Ball.run_hod(Ball.tracers, Ball.want_rsd, Nthread = 64)

-    clustering = Ball.compute_wp(mock_dict, Ball.rpbins, Ball.pimax, Ball.pi_bin_size, Nthread = 16)
+    Ball.compute_wp(mock_dict, Ball.rpbins, Ball.pimax, Ball.pi_bin_size, Nthread = 16)

     lnP = Data.compute_likelihood(theory_density)

@@ -77,7 +76,7 @@ def main(path2config):
     nlive = dynesty_config_params['nlive']
     maxcall = dynesty_config_params['maxcall']
     method = dynesty_config_params['method']
-    bound = dynesty_config_params['bound']
+    dynesty_config_params['bound']

     # where to record
     prefix_chain = os.path.join(os.path.expanduser(dynesty_config_params['path2output']),
2 changes: 1 addition & 1 deletion tests/test_data.py
@@ -237,7 +237,7 @@ def test_halo_lc():
     cat = CompaSOHaloCatalog(curdir / 'halo_light_cones/AbacusSummit_base_c000_ph001-abridged/z2.250/',
                              fields='all',
                              subsamples=True)
-    assert(cat.halo_lc == True)
+    assert(cat.halo_lc is True)

     HALO_LC_CAT = refdir / 'halo_lc_cat.asdf'
     HALO_LC_SUBSAMPLES = refdir / 'halo_lc_subsample.asdf'
11 changes: 5 additions & 6 deletions tests/test_hod.py
@@ -17,7 +17,6 @@
 import h5py
 import numba
 import numpy as np
-import pytest
 import yaml
 from astropy.io import ascii
 from common import check_close
@@ -94,11 +93,11 @@ def test_hod(tmp_path, reference_mode = False):

     # additional parameter choices
     want_rsd = HOD_params['want_rsd']
-    write_to_disk = HOD_params['write_to_disk']
+    HOD_params['write_to_disk']
     bin_params = clustering_params['bin_params']
-    rpbins = np.logspace(bin_params['logmin'], bin_params['logmax'], bin_params['nbins'])
-    pimax = clustering_params['pimax']
-    pi_bin_size = clustering_params['pi_bin_size']
+    np.logspace(bin_params['logmin'], bin_params['logmax'], bin_params['nbins'])
+    clustering_params['pimax']
+    clustering_params['pi_bin_size']

     # create a new abacushod object
     newBall = AbacusHOD(sim_params, HOD_params, clustering_params)
@@ -127,7 +126,7 @@ def test_hod(tmp_path, reference_mode = False):
     config['zcv_params']['tracer_dir'] = pjoin(tmp_path, 'zcv_tracer_data')
     mock_dict = newBall.run_hod(newBall.tracers, want_rsd = config['HOD_params']['want_rsd'], write_to_disk = False, Nthread = 2)
     del mock_dict['ELG'] # drop ELG since zcv works with a single tracer currently
-    zcv_dict = newBall.apply_zcv(mock_dict, config)
+    newBall.apply_zcv(mock_dict, config)

 if __name__ == '__main__':
     with tempfile.TemporaryDirectory() as tmpdir:
(1 more changed file, with the remaining 7 additions and 8 deletions, was not loaded on this page.)
