Merge pull request #409 from Hjorthmedh/init_redux

Init redux

Hjorthmedh authored Feb 20, 2024
2 parents 3bb939d + b1e258d commit 6462746
Showing 11 changed files with 2,020 additions and 4 deletions.


@@ -0,0 +1,83 @@
#!/bin/bash -l
#SBATCH --partition=main
#SBATCH -o log/runSnudda-%j-output.txt
#SBATCH -e log/runSnudda-%j-error.txt
#SBATCH -t 0:59:00
#SBATCH -J SnuddaInput
#SBATCH -A naiss2023-5-231
#SBATCH --nodes=2
#SBATCH -n 128
#SBATCH --cpus-per-task=2
#SBATCH --mem-per-cpu=930M
#SBATCH --mail-type=ALL
module load snic-env


#..
#export OMP_STACKSIZE=128G
ulimit -s unlimited


#let NWORKERS="$SLURM_NTASKS-2"
#let NWORKERS="100"
let NWORKERS="40"

# REMEMBER TO CREATE THE "log" DIRECTORY


export IPNWORKERS=$NWORKERS


export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython"
rm -r $IPYTHONDIR
export IPYTHON_PROFILE=default
source $HOME/Snudda/snudda_env/bin/activate


#.. Start the ipcontroller
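# FI_CXI_DEFAULT_VNI is set below to a random 4-byte unsigned integer
# (od reads 4 bytes from /dev/urandom and prints it as an unsigned decimal);
# presumably this gives each of the overlapping srun steps on Dardel its own
# Slingshot/libfabric network identifier so they do not clash.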
export FI_CXI_DEFAULT_VNI=$(od -vAn -N4 -tu < /dev/urandom)
srun -n 1 -N 1 -c 2 --exact --overlap --mem=0 ./../../ipcontroller_new.sh &


echo ">>> waiting 60s for controller to start"
sleep 60

#.. Read in CONTROLLERIP
CONTROLLERIP=$(<controller_ip.txt)


##.. Start the engines
echo ">>> starting ${IPNWORKERS} engines "
#srun -n ${IPNWORKERS} -c 2 --exact --overlap ipengine --location=${CONTROLLERIP} --profile=${IPYTHON_PROFILE} --mpi \
#--ipython-dir=${IPYTHONDIR} --timeout=30.0 --log-level=DEBUG \
#--BaseParallelApplication.verbose_crash=True --IPEngine.verbose_crash=True \
#--Kernel.stop_on_error_timeout=1.0 --IPythonKernel.stop_on_error_timeout=1.0 \
#Session.buffer_threshold=4096 Session.copy_threshold=250000 \
#Session.digest_history_size=250000 c.EngineFactory.max_heartbeat_misses=10 c.MPI.use='mpi4py' \
#1> ipe_${SLURM_JOBID}.out 2> ipe_${SLURM_JOBID}.err &

#srun -n ${IPNWORKERS} -c 2 --exact --overlap valgrind --leak-check=full --show-leak-kinds=all \
#ipengine --location=${CONTROLLERIP} --profile=${IPYTHON_PROFILE} --mpi \
#--ipython-dir=${IPYTHONDIR} --timeout=30.0 c.EngineFactory.max_heartbeat_misses=10 c.MPI.use='mpi4py' \
#1> ipe_${SLURM_JOBID}.out 2> ipe_${SLURM_JOBID}.err &

export FI_CXI_DEFAULT_VNI=$(od -vAn -N4 -tu < /dev/urandom)
srun -n ${IPNWORKERS} -c 2 -N ${SLURM_JOB_NUM_NODES} --exact --overlap --mem=0 ipengine \
--location=${CONTROLLERIP} --profile=${IPYTHON_PROFILE} --mpi \
--ipython-dir=${IPYTHONDIR} --timeout=30.0 c.EngineFactory.max_heartbeat_misses=10 c.MPI.use='mpi4py' \
1> ipe_${SLURM_JOBID}.out 2> ipe_${SLURM_JOBID}.err &


echo ">>> waiting 60s for engines to start"
sleep 30

export FI_CXI_DEFAULT_VNI=$(od -vAn -N4 -tu < /dev/urandom)
srun -n 1 -N 1 --exact --overlap --mem=0 ./Dardel_regenerate_input.sh


echo " "

echo "JOB END "`date` start_time_network_connect.txt

wait
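
If the engines fail to register in time, a quick check from the same environment is an ipyparallel client pointed at the profile set up above. This is only a sketch (it assumes ipyparallel is importable from the activated snudda_env and that the controller wrote its connection files under $IPYTHONDIR):

import os
import ipyparallel as ipp

# Connect to the running controller and count the registered engines.
rc = ipp.Client(profile=os.environ.get("IPYTHON_PROFILE", "default"),
                ipython_dir=os.environ["IPYTHONDIR"],
                timeout=120)
print(f"{len(rc.ids)} engines connected")  # expect IPNWORKERS (40) engines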

@@ -0,0 +1,60 @@
#!/bin/bash



SNUDDA_DIR=$HOME/Snudda/snudda
JOBDIR=../networks/sten_3

# SIMSIZE=50000

# If the BasalGangliaData directory exists, then use that for our data
#/cfs/klemming/scratch/${USER:0:1}/$USER/BasalGangliaData/data
#BasalGangliaData/Parkinson/PD0
if [[ -d "$HOME/BasalGangliaData/data" ]]; then
export SNUDDA_DATA="$HOME/BasalGangliaData/data"
echo "Setting SNUDDA_DATA to $SNUDDA_DATA"
else
echo "SNUDDA_DATA environment variable not changed (may be empty): $SNUDDA_DATA"
fi

mkdir -p $JOBDIR

echo "Dardel_regenerate_input.sh should be started with srun -n 1, to only get one process"

echo "SLURM_PROCID = $SLURM_PROCID"

if [ "$SLURM_PROCID" -gt 0 ]; then
mock_string="Not main process"
else

# For debug purposes:
echo "PATH: "$PATH
echo "IPYTHONDIR: "$IPYTHONDIR
echo "PYTHONPATH: "$PYTHONPATH
echo "LD_LIBRARY_PATH: "$LD_LIBRARY_PATH

echo ">>>>>> Main process starting ipcluster"
echo

echo "Start time: " > start_time_network_connect.txt
date >> start_time_network_connect.txt

echo ">>> Input: "`date`
# snudda input ${JOBDIR} --parallel --time 5 --input input.json
snudda input ${JOBDIR} --parallel --time 18 --input input-B.json --networkFile ${JOBDIR}/network-synapses.hdf5 --inputFile ${JOBDIR}/input-spikes-B.hdf5


#.. Shut down cluster
# ipcluster stop
#.. Shutdown ipcontroller
echo "Shutting down ipcontroller"

python ../../ipcontroller_shutdown.py


date
#echo "JOB END "`date` start_time_network_connect.txt

echo "EXITING Dardel_runjob_lateral.sh"

fi
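
ipcontroller_shutdown.py itself is not part of this diff; the usual way to stop the engines and the controller through ipyparallel looks roughly like the sketch below (an assumption about what that helper does, not its verbatim contents):

import ipyparallel as ipp

# Connect using the same profile/IPYTHONDIR as the job, then shut everything down.
rc = ipp.Client()
rc.shutdown(hub=True)   # hub=True also stops the ipcontroller, not just the engines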
@@ -0,0 +1,96 @@
#!/bin/bash -l
#SBATCH --partition=main
#SBATCH -o log/Simulate-%j-output.txt
#SBATCH -e log/Simulate-%j-error.txt
#SBATCH -t 9:30:00
#SBATCH --time-min=8:59:00
#SBATCH -J Simulate
#SBATCH -A naiss2023-5-231
#SBATCH --nodes=60
#SBATCH --tasks-per-node=128
#SBATCH --mail-type=ALL

# 2024-02-16: 40 cores per node worked, had 28% free... trying to increase to 45 cores per node
# 2024-02-18: Increasing back up to 128 cores. Memory free is probably more
# dependent on total number of nodes allocated

# You need to point this at the directory where you created the network
#NETWORK_DIR=/cfs/klemming/home/${USER:0:1}/$USER/Snudda/examples/parallel/KTH_PDC/networks/test_10k
NETWORK_DIR=../networks/sten_3

SIMULATION_CONFIG_WITH_SYNAPSES=experiment_config_sten_3-with-synapses-B.json
SIMULATION_CONFIG_NO_SYNAPSES=experiment_config_sten_3-no-synapses-B.json


# NETWORK_WITH_SYNAPSES_OUTPUT=$NETWORK_DIR/simulation/output-with-synapses-sten_1.hdf5
# NETWORK_NO_SYNAPSES_OUTPUT=$NETWORK_DIR/simulation/output-no-synapses-sten_1.hdf5


export N_WORKERS=$SLURM_NTASKS

module load snic-env
source $HOME/Snudda/snudda_env/bin/activate
SNUDDA_DIR=/cfs/klemming/home/"${USER:0:1}"/$USER/Snudda

# If the BasalGangliaData directory exists, then use that for our data
if [[ -d "/cfs/klemming/home/${USER:0:1}/$USER/BasalGangliaData/data" ]]; then
export SNUDDA_DATA="/cfs/klemming/home/${USER:0:1}/$USER/BasalGangliaData/data"
echo "Setting SNUDDA_DATA to $SNUDDA_DATA"
rm mechanisms
ln -s $SNUDDA_DATA/neurons/mechanisms/ mechanisms
else
echo "SNUDDA_DATA environment variable not changed (may be empty): $SNUDDA_DATA"
rm mechanisms
ln -s ../../../../snudda/data/neurons/mechanisms/ mechanisms
fi


NETWORK_INFO_FILE=$NETWORK_DIR/network-synapses.hdf5
# NETWORK_INFO_FILE=$NETWORK_DIR/network-synapses-minimal.hdf5
NETWORK_INPUT_FILE=$NETWORK_DIR/input-spikes-B.hdf5
# NETWORK_VOLTAGE_FILE=$NETWORK_DIR/simulation/voltage-trace-${SLURM_JOBID}.txt



echo "Network dir: "$NETWORK_DIR

export PATH=$SNUDDA_DIR/snudda_env/bin/:$PATH
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CRAY_LD_LIBRARY_PATH
export PYTHONPATH=$SNUDDA_DIR/snudda_env/lib/python3.9/

##############

rm -r x86_64

export CXX=CC
export CC=cc
export FC=ftn
export MPICC=cc
export MPICXX=CC

CC --version

echo "About to run nrnivmodl"
which nrnivmodl

# srun -n nrnivmodl mechanisms/

srun -n 1 nrnivmodl -incflags "-lltdl=/usr/lib64/libltdl.so.7 -lreadline=/lib64/libreadline.so.7 -lncurses=/lib64/libncurses.so.6.1" -loadflags "-DLTDL_LIBRARY=/usr/lib64/libltdl.so.7 -DREADLINE_LIBRARY=/lib64/libreadline.so.7 -DNCURSES_LIBRARY=/lib64/libncurses.so.6.1" mechanisms/

# GJ disabled
# srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/x86_64/special -mpi -python $SNUDDA_DIR/simulate/simulate.py $NETWORK_INFO_FILE $NETWORK_INPUT_FILE --disableGJ --time 3.5 --voltOut $NETWORK_VOLTAGE_FILE

# GJ active
# srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/x86_64/special -mpi -python $SNUDDA_DIR/snudda/simulate/simulate.py $NETWORK_INFO_FILE $NETWORK_INPUT_FILE --time 18 --outputFile $NETWORK_WITH_SYNAPSES_OUTPUT

# srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/x86_64/special -mpi -python $SNUDDA_DIR/snudda/simulate/simulate.py $NETWORK_INFO_FILE $NETWORK_INPUT_FILE --time 18 --disableSyn --outputFile $NETWORK_NO_SYNAPSES_OUTPUT


# Changed to using the simulation_config

srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/x86_64/special -mpi -python $SNUDDA_DIR/snudda/simulate/simulate.py dummy_file dummy_file --simulation_config $SIMULATION_CONFIG_WITH_SYNAPSES

srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/x86_64/special -mpi -python $SNUDDA_DIR/snudda/simulate/simulate.py dummy_file dummy_file --simulation_config $SIMULATION_CONFIG_NO_SYNAPSES


# srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/x86_64/special -mpi -python $SNUDDA_DIR/snudda/simulate/simulate.py $NETWORK_INFO_FILE $NETWORK_INPUT_FILE --time 5 --noVolt
@@ -0,0 +1,11 @@
{
"network_file": "../networks/sten_3/network-synapses.hdf5",
"input_file": "../networks/sten_3/input-spikes-B.hdf5",
"output_file": "../networks/sten_3/simulation/output-no-synapses-sten_3-B.hdf5",
"log_file": "../networks/sten_3/log/network-simulation-no-synapses-log-3-B.txt",
"sample_dt": 0.01,
"time": 18.0,
"disable_synapses": true,
"record_all_soma": true,
"record_all_compartments": [0, 500, 1000, 1500, 2000, 2500, 3000, 3500, 4000, 4500, 5000, 5500, 6000, 6500, 7000, 7500]
}
@@ -0,0 +1,10 @@
{
"network_file": "../networks/sten_3/network-synapses.hdf5",
"input_file": "../networks/sten_3/input-spikes-B.hdf5",
"output_file": "../networks/sten_3/simulation/output-with-synapses-sten_3-B.hdf5",
"log_file": "../networks/sten_3/log/network-simulation-with-synapses-log-3-B.txt",
"sample_dt": 0.01,
"time": 18.0,
"record_all_soma": true,
"record_all_compartments": [0, 500, 1000, 1500, 2000, 2500, 3000, 3500, 4000, 4500, 5000, 5500, 6000, 6500, 7000, 7500]
}
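
Both simulation configs above write a single HDF5 output file. The exact group layout is defined by Snudda's output format, so a safe first step when post-processing is simply to list what the file contains; a minimal h5py sketch:

import h5py

# List every group and dataset in one of the output files named above.
with h5py.File("../networks/sten_3/simulation/output-with-synapses-sten_3-B.hdf5", "r") as f:
    f.visit(print)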
62 changes: 62 additions & 0 deletions examples/parallel/KTH_PDC/lateral_inhibition/sten_3/input-B.json
@@ -0,0 +1,62 @@
{
"dSPN": {
"cortical:1" : {
"generator" : "poisson",
"start" : [1, 4, 7, 10, 13, 16],
"end" : [3, 6, 9, 12, 15, 18],
"frequency" : [4, 4, 6, 6, 8, 8],
"population_unit_id" : 1
},

"cortical:2" : {
"generator" : "poisson",
"start" : [4, 10, 16],
"end" : [6, 12, 18],
"frequency" : [10],
"population_unit_id" : 2
},

"cortical:0" : {
"generator" : "poisson",
"start" : [0],
"end" : [18],
"frequency" : [2],
"population_unit_id" : 0
}

},

"iSPN": {
"cortical:1" : {
"generator" : "poisson",
"start" : [1, 4, 7, 10, 13, 16],
"end" : [3, 6, 9, 12, 15, 18],
"frequency" : [4, 4, 6, 6, 8, 8],
"population_unit_id" : 1
},

"cortical:2" : {
"generator" : "poisson",
"start" : [4, 10, 16],
"end" : [6, 12, 18],
"frequency" : [10],
"population_unit_id" : 2
},
"cortical:0" : {
"generator" : "poisson",
"start" : [0],
"end" : [18],
"frequency" : [2],
"population_unit_id" : 0
}

},
"FS": {
"cortical" : {
"generator" : "poisson",
"start" : [0],
"end" : [18],
"frequency" : [2]
}
}
}
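
Each named block above is a Poisson input definition where "start", "end" and "frequency" are parallel lists of stimulation windows (a single-element "frequency", as in "cortical:2", is presumably applied to every window, and "cortical:0" is the 2 Hz background over the full 0-18 s). As a standalone illustration of what one such window list describes (not Snudda's actual generator), spike times could be drawn like this:

import numpy as np

rng = np.random.default_rng(1)

def poisson_window(start, end, freq):
    """Homogeneous Poisson spike times within one [start, end) window (seconds, Hz)."""
    n_spikes = rng.poisson(freq * (end - start))
    return np.sort(rng.uniform(start, end, n_spikes))

# The "cortical:1" definition: 4/4/6/6/8/8 Hz in six 2 s windows.
spikes = np.concatenate([poisson_window(s, e, f)
                         for s, e, f in zip([1, 4, 7, 10, 13, 16],
                                            [3, 6, 9, 12, 15, 18],
                                            [4, 4, 6, 6, 8, 8])])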
2 changes: 1 addition & 1 deletion snudda/init/init.py
@@ -99,7 +99,7 @@ def __init__(self,

if struct_def:
for sn in struct_def:
if "stay_inside" in inspect.getargspec(struct_func[sn]).args:
if "stay_inside" in inspect.getfullargspec(struct_func[sn]).args:
print(f"Adding {sn} with {struct_def[sn]} neurons (stay_inside={honor_stay_inside})")
struct_func[sn](num_neurons=struct_def[sn], neurons_dir=neurons_dir, stay_inside=honor_stay_inside)
else:
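
The change above replaces inspect.getargspec(), which was long deprecated and removed in Python 3.11, with inspect.getfullargspec(), which exposes the same .args list. A standalone check of the pattern used here (the function below is only illustrative, not Snudda's actual signature):

import inspect

def define_example(num_neurons, neurons_dir=None, stay_inside=False):
    pass

# Same test as in init.py: does the function accept a stay_inside argument?
print("stay_inside" in inspect.getfullargspec(define_example).args)   # True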
9 changes: 7 additions & 2 deletions snudda/input/input.py
@@ -726,6 +726,11 @@ def make_neuron_input_parallel(self):

if meta_inp_name == existing_inp_name.split(":")[0]:

# This is so that we can have multiple overrides of, for example, cortical input
# (cortical:A, cortical:B, cortical:C) to the same set of neurons
# without the input definitions overwriting each other
extra_copy_inp_data = copy.deepcopy(meta_inp_data_copy)

if "population_unit_id" in input_info[existing_inp_name] \
and self.network_data["neurons"][neuron_id]["population_unit"] \
!= input_info[existing_inp_name]["population_unit_id"]:
@@ -746,9 +751,9 @@
if key == "parameter_list" and data is None:
continue

meta_inp_data_copy[key] = data
extra_copy_inp_data[key] = data

input_info[existing_inp_name] = meta_inp_data_copy
input_info[existing_inp_name] = extra_copy_inp_data
data_updated = True

if not data_updated:
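
The new extra_copy_inp_data = copy.deepcopy(meta_inp_data_copy) is what keeps several overrides (cortical:A, cortical:B, cortical:C) from clobbering each other: previously they all wrote into the same dictionary. A standalone illustration of that aliasing problem, using plain dicts rather than the real Snudda structures:

import copy

meta = {"generator": "poisson", "frequency": [2]}

# Without a copy, two "overrides" share the same dict:
override_a = meta
override_b = meta
override_b["frequency"] = [10]
print(override_a["frequency"])   # [10] -- the first override was silently changed too

# With deepcopy, as in the fix, each override edits its own data:
meta = {"generator": "poisson", "frequency": [2]}
override_a = copy.deepcopy(meta)
override_b = copy.deepcopy(meta)
override_b["frequency"] = [10]
print(override_a["frequency"])   # [2] -- the overrides stay independent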
3 changes: 3 additions & 0 deletions snudda/place/region_mesh_redux.py
@@ -222,6 +222,9 @@ def _remove_close_neurons_helper(self, points, remove_fraction=0.05):
sorted_counts = counts[sort_idx]

first_pair = np.argmax(sorted_counts == 1)
if sorted_counts[first_pair] != 1:
first_pair = len(sorted_counts) - 1 # Basically use remove_fraction_idx

remove_fraction_idx = int(np.ceil(remove_fraction*len(sorted_offenders)))
remove_idx = sorted_offenders[:min(first_pair, remove_fraction_idx)]

2 changes: 1 addition & 1 deletion snudda/simulate/simulate.py
@@ -1260,7 +1260,7 @@ def add_external_input(self, input_file=None):
(f"Converting {self.neurons[neuron_id].name} {par}={syn_params[par]} "
f"we get {par_value}, "
f"but expected >= 0.01 and < 10000")

# print(f"Setting {par} to {par_value}.")
setattr(syn, par, par_value)

# Need to save references, otherwise they will be freed
