Add cython-lint #30

Merged (2 commits) on Dec 12, 2023
82 changes: 55 additions & 27 deletions .github/workflows/build.yml
@@ -7,6 +7,11 @@ on:
branches: [ "master" ]
workflow_dispatch:


concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:

linux:
@@ -33,6 +38,8 @@ jobs:
PYNUCLEUS_BUILD_PARALLELISM: 2
OMPI_CC: ${{ matrix.c-compiler }}
OMPI_CXX: ${{ matrix.cxx-compiler }}
BUILD_PRETTY_IDENTIFIER: "Linux ${{ matrix.c-compiler }} Python ${{ matrix.py-version }}"
BUILD_IDENTIFIER: "Linux-${{ matrix.c-compiler }}-${{ matrix.py-version }}"

steps:
- name: Check out repo
@@ -45,7 +52,7 @@ jobs:
uses: actions/cache/restore@v3
with:
path: /home/runner/.cache/ccache
key: ccache-${{ runner.os }}-${{ matrix.c-compiler }}-${{ matrix.py-version }}
key: ccache-${{ env.BUILD_IDENTIFIER }}

- uses: actions/setup-python@v4
if: always()
@@ -77,52 +84,53 @@ jobs:
GH_TOKEN: ${{ github.token }}
run: |
gh extension install actions/gh-actions-cache
gh actions-cache delete ccache-${{ runner.os }}-${{ matrix.c-compiler}}-${{ matrix.py-version }} --confirm
gh actions-cache delete ccache-${{ env.BUILD_IDENTIFIER }} --confirm
continue-on-error: true

- name: Push ccache cache
if: always()
uses: actions/cache/save@v3
with:
path: /home/runner/.cache/ccache
key: ccache-${{ runner.os }}-${{ matrix.c-compiler }}-${{ matrix.py-version }}
key: ccache-${{ env.BUILD_IDENTIFIER }}

- name: Ccache report
if: always()
run: ccache -s

- name: Run tests
if: always()
run: python3 -m pytest --junit-xml=test-results-${{ runner.os }}-${{ matrix.c-compiler }}-${{ matrix.py-version }}.xml tests/
run: python3 -m pytest --junit-xml=test-results-${{ env.BUILD_IDENTIFIER }}.xml tests/

- name: Run flake8
if: always()
run: |
make flake8
mv flake8.xml flake8-${{ runner.os }}-${{ matrix.c-compiler }}-${{ matrix.py-version }}.xml
mv flake8.xml flake8-${{ env.BUILD_IDENTIFIER }}.xml

- name: Archive test results
uses: actions/upload-artifact@v3
- name: Run cython-lint
if: always()
with:
name: Test results
path: test-results-${{ runner.os }}-${{ matrix.c-compiler }}-${{ matrix.py-version }}.xml
run: |
make cython-lint
mv cython-lint.xml cython-lint-${{ env.BUILD_IDENTIFIER }}.xml

- name: Report test results
uses: dorny/test-reporter@v1
- name: Archive results
uses: actions/upload-artifact@v3
if: always()
with:
name: Test report (${{ runner.os }}, ${{ matrix.c-compiler }}, Python ${{ matrix.py-version }})
path: test-results-${{ runner.os }}-${{ matrix.c-compiler }}-${{ matrix.py-version }}.xml
reporter: java-junit
fail-on-error: true
name: Results (${{ env.BUILD_PRETTY_IDENTIFIER }})
path: |
test-results-${{ env.BUILD_IDENTIFIER }}.xml
flake8-${{ env.BUILD_IDENTIFIER }}.xml
cython-lint-${{ env.BUILD_IDENTIFIER }}.xml

- name: Report flake8 results
- name: Report results
uses: dorny/test-reporter@v1
if: always()
with:
name: Flake8 report (${{ runner.os }}, ${{ matrix.c-compiler }}, Python ${{ matrix.py-version }})
path: flake8-${{ runner.os }}-${{ matrix.c-compiler }}-${{ matrix.py-version }}.xml
name: Report (${{ env.BUILD_PRETTY_IDENTIFIER }})
path: |
*-${{ env.BUILD_IDENTIFIER }}.xml
reporter: java-junit
fail-on-error: false

@@ -157,7 +165,7 @@ jobs:
key: ccache-${{ runner.os }}-${{ matrix.py-version }}

- name: Setup GNU Fortran
uses: modflowpy/install-gfortran-action@v1
uses: fortran-lang/setup-fortran@v1

- uses: actions/setup-python@v4
with:
@@ -173,6 +181,10 @@
if: always()
run: ccache --show-config

- name: Augment path
run: |
echo "$HOME/.local/bin" >> $GITHUB_PATH

- name: Install Python dependencies
run: make prereq && make prereq-extra

@@ -204,18 +216,34 @@ jobs:
if: always()
run: python3 -m pytest --junit-xml=test-results-${{ runner.os }}-${{ matrix.py-version }}.xml tests/

- name: Archive test results
- name: Run flake8
if: always()
run: |
make flake8
mv flake8.xml flake8-${{ runner.os }}-${{ matrix.py-version }}.xml

- name: Run cython-lint
if: always()
run: |
make cython-lint
mv cython-lint.xml cython-lint-${{ runner.os }}-${{ matrix.py-version }}.xml

- name: Archive results
uses: actions/upload-artifact@v3
if: always()
with:
name: Test results
path: test-results-${{ runner.os }}-${{ matrix.py-version }}.xml
name: Results ${{ github.job }}
path: |
test-results-${{ runner.os }}-${{ matrix.py-version }}.xml
flake8-${{ runner.os }}-${{ matrix.py-version }}.xml
cython-lint-${{ runner.os }}-${{ matrix.py-version }}.xml

- name: Report test results
- name: Report results
uses: dorny/test-reporter@v1
if: always()
with:
name: Test report (${{ runner.os }}, Python ${{ matrix.py-version }})
path: test-results-${{ runner.os }}-${{ matrix.py-version }}.xml
name: Report (${{ github.job }})
path: |
*-${{ runner.os }}-${{ matrix.py-version }}.xml
reporter: java-junit
fail-on-error: true
fail-on-error: false
8 changes: 5 additions & 3 deletions Dockerfile
@@ -33,8 +33,8 @@ ENV VIRTUAL_ENV=/pynucleus/venv
RUN python3 -m venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
WORKDIR /pynucleus
RUN make prereq && \
make prereq-extra && \
RUN make prereq PIP_FLAGS=--no-cache-dir && \
make prereq-extra PIP_FLAGS=--no-cache-dir && \
make install && \
python -m pip install --no-cache-dir ipykernel && \
rm -rf build packageTools/build base/build metisCy/build fem/build multilevelSolver/build nl/build
@@ -45,6 +45,8 @@ RUN echo "alias ls='ls --color=auto -FN'" >> /root/.bashrc \
# allow running MPI as root in the container
# bind MPI ranks to hwthreads
ENV OMPI_MCA_hwloc_base_binding_policy=hwthread \
MPIEXEC_FLAGS=--allow-run-as-root
MPIEXEC_FLAGS=--allow-run-as-root \
OMPI_ALLOW_RUN_AS_ROOT=1 \
OMPI_ALLOW_RUN_AS_ROOT_CONFIRM=1

RUN python -m ipykernel install --name=PyNucleus
12 changes: 10 additions & 2 deletions Makefile
@@ -169,8 +169,16 @@ prereq:
$(PYTHON) -m pip install $(PIP_FLAGS) $(PIP_INSTALL_FLAGS) scikit-sparse

prereq-extra:
$(PYTHON) -m pip install $(PIP_FLAGS) pytest pytest-html pytest-xdist Sphinx sphinxcontrib-programoutput flake8 flake8-junit-report
$(PYTHON) -m pip install $(PIP_FLAGS) pytest pytest-html pytest-xdist Sphinx sphinxcontrib-programoutput flake8 flake8-junit-report cython-lint

flake8:
$(PYTHON) -m flake8 --output-file=flake8.txt --exit-zero drivers packageTools base metisCy fem multilevelSolver nl tests
$(PYTHON) -m flake8 --output-file=flake8.txt --exit-zero drivers examples packageTools base metisCy fem multilevelSolver nl tests
flake8_junit flake8.txt flake8.xml
rm flake8.txt

cython-lint:
- cython-lint --max-line-length=160 drivers examples packageTools base metisCy fem multilevelSolver nl tests > cython-lint.txt 2>&1
flake8_junit cython-lint.txt cython-lint.xml
rm cython-lint.txt
sed 's/name="flake8"/name="cython-lint"/g' cython-lint.xml > cython-lint2.xml
mv cython-lint2.xml cython-lint.xml
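The sed step above suggests that flake8_junit hard-codes the JUnit suite name "flake8"; rewriting it to "cython-lint" keeps the two reports distinguishable in the CI summary. A minimal Python sketch of the same rename, for illustration only (the report is assumed to be cython-lint.xml in the working directory):

```python
# Illustration of the suite-name rewrite performed by sed in the cython-lint target.
from pathlib import Path

report = Path("cython-lint.xml")  # assumed location of the converted JUnit report
report.write_text(report.read_text().replace('name="flake8"', 'name="cython-lint"'))
```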
4 changes: 2 additions & 2 deletions README.rst
@@ -99,7 +99,7 @@ For example, on Ubuntu podman can be installed with

Instructions for other platforms can be found here: https://podman.io/docs/installation

Once podman is installed, we can download a copy of https://github.com/sandialabs/PyNucleus/compose.yaml and save it to an empty directory.
Once podman is installed, we can download a copy of https://github.com/sandialabs/PyNucleus/blob/master/compose.yaml and save it to an empty directory.
In that directory we then run

.. code-block:: shell
@@ -121,7 +121,7 @@ For development using PyNucleus it can be useful to launch a Jupyter notebook se

podman compose up pynucleus-jupyter

and then open the access the Jupyter notebook interface at https://localhost:8889
and then open the Jupyter notebook interface at https://localhost:8889


Spack install
11 changes: 6 additions & 5 deletions base/PyNucleus_base/LinearOperator_{SCALAR}.pxi
@@ -651,14 +651,12 @@ cdef class {SCALAR_label}Transpose_Linear_Operator({SCALAR_label}LinearOperator)
cdef INDEX_t matvec(self,
{SCALAR}_t[::1] x,
{SCALAR}_t[::1] y) except -1:
self.A.matvecTrans(x, y)
return 0
return self.A.matvecTrans(x, y)

cdef INDEX_t matvec_no_overwrite(self,
{SCALAR}_t[::1] x,
{SCALAR}_t[::1] y) except -1:
self.A.matvecTrans_no_overwrite(x, y)
return 0
return self.A.matvecTrans_no_overwrite(x, y)

def isSparse(self):
return self.A.isSparse()
@@ -677,7 +675,10 @@ cdef class {SCALAR_label}Transpose_Linear_Operator({SCALAR_label}LinearOperator)
return Bcsr

def toarray(self):
return self.A.transpose().toarray()
try:
return self.A.transpose().toarray()
except AttributeError:
return np.ascontiguousarray(self.A.toarray().T)

def get_diagonal(self):
return np.array(self.A.diagonal, copy=False)
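The toarray() change above adds a fallback for wrapped operators that do not implement transpose(). A minimal pure-Python sketch of the same pattern (names are illustrative; toarray() is assumed to return a NumPy array):

```python
import numpy as np

def transposed_toarray(A):
    """Dense form of A^T: prefer the operator's own transpose(), fall back to
    transposing the dense matrix when transpose() is not available."""
    try:
        return A.transpose().toarray()
    except AttributeError:
        # .T returns a view; ascontiguousarray makes the result C-contiguous.
        return np.ascontiguousarray(A.toarray().T)
```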
92 changes: 83 additions & 9 deletions base/PyNucleus_base/linear_operators.pyx
@@ -1213,26 +1213,57 @@ cdef class sumMultiplyOperator(LinearOperator):
cdef:
INDEX_t i
LinearOperator op
int ret
op = self.ops[0]
op.matvec(x, y)
ret = op.matvec(x, y)
scaleScalar(y, self.coeffs[0])
for i in range(1, self.coeffs.shape[0]):
op = self.ops[i]
op.matvec(x, self.z)
ret = min(ret, op.matvec(x, self.z))
assign3(y, y, 1.0, self.z, self.coeffs[i])
return 0
return ret

cdef INDEX_t matvec_no_overwrite(self,
REAL_t[::1] x,
REAL_t[::1] y) except -1:
cdef:
INDEX_t i
LinearOperator op
int ret = 0
for i in range(self.coeffs.shape[0]):
op = self.ops[i]
ret = min(op.matvec(x, self.z), ret)
assign3(y, y, 1.0, self.z, self.coeffs[i])
return ret

cdef INDEX_t matvecTrans(self,
REAL_t[::1] x,
REAL_t[::1] y) except -1:
cdef:
INDEX_t i
LinearOperator op
int ret
op = self.ops[0]
ret = op.matvecTrans(x, y)
scaleScalar(y, self.coeffs[0])
for i in range(1, self.coeffs.shape[0]):
op = self.ops[i]
ret = min(ret, op.matvecTrans(x, self.z))
assign3(y, y, 1.0, self.z, self.coeffs[i])
return ret

cdef INDEX_t matvecTrans_no_overwrite(self,
REAL_t[::1] x,
REAL_t[::1] y) except -1:
cdef:
INDEX_t i
LinearOperator op
int ret = 0
for i in range(self.coeffs.shape[0]):
op = self.ops[i]
op.matvec(x, self.z)
ret = min(ret, op.matvecTrans(x, self.z))
assign3(y, y, 1.0, self.z, self.coeffs[i])
return 0
return ret

def toarray(self):
return sum([c*op.toarray() for c, op in zip(self.coeffs, self.ops)])
@@ -1436,8 +1467,34 @@ cdef class multiIntervalInterpolationOperator(LinearOperator):
interpolationOperator op
assert self.selected != -1
op = self.ops[self.selected]
op.matvec(x, y)
return 0
return op.matvec(x, y)

cdef INDEX_t matvec_no_overwrite(self,
REAL_t[::1] x,
REAL_t[::1] y) except -1:
cdef:
interpolationOperator op
assert self.selected != -1
op = self.ops[self.selected]
return op.matvec_no_overwrite(x, y)

cdef INDEX_t matvecTrans(self,
REAL_t[::1] x,
REAL_t[::1] y) except -1:
cdef:
interpolationOperator op
assert self.selected != -1
op = self.ops[self.selected]
return op.matvecTrans(x, y)

cdef INDEX_t matvecTrans_no_overwrite(self,
REAL_t[::1] x,
REAL_t[::1] y) except -1:
cdef:
interpolationOperator op
assert self.selected != -1
op = self.ops[self.selected]
return op.matvecTrans_no_overwrite(x, y)

def toarray(self):
assert self.selected != -1
@@ -1521,8 +1578,25 @@ cdef class delayedConstructionOperator(LinearOperator):
REAL_t[::1] x,
REAL_t[::1] y) except -1:
self.assure_constructed()
self.A.matvec(x, y)
return 0
return self.A.matvec(x, y)

cdef INDEX_t matvec_no_overwrite(self,
REAL_t[::1] x,
REAL_t[::1] y) except -1:
self.assure_constructed()
return self.A.matvec_no_overwrite(x, y)

cdef INDEX_t matvecTrans(self,
REAL_t[::1] x,
REAL_t[::1] y) except -1:
self.assure_constructed()
return self.A.matvecTrans(x, y)

cdef INDEX_t matvecTrans_no_overwrite(self,
REAL_t[::1] x,
REAL_t[::1] y) except -1:
self.assure_constructed()
return self.A.matvecTrans_no_overwrite(x, y)

def toarray(self):
self.assure_constructed()
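Across these hunks the matvec variants now forward the status codes of the wrapped operators instead of returning a hard-coded 0; in sumMultiplyOperator the codes are folded with min() so a negative error code from any summand reaches the caller. A pure-Python sketch of that aggregation (DenseOp and summed_matvec are illustrative stand-ins, not part of the PR):

```python
import numpy as np

class DenseOp:
    """Stand-in operator: matvec writes M @ x into y and returns a status
    code (0 on success, negative on error), mirroring the except -1 style."""
    def __init__(self, M):
        self.M = np.asarray(M, dtype=float)

    def matvec(self, x, y):
        y[:] = self.M @ x
        return 0

def summed_matvec(ops, coeffs, x, y):
    """y = sum_i coeffs[i] * (ops[i] @ x); min() folds the status codes so a
    negative code from any summand is propagated instead of discarded."""
    z = np.empty_like(y)
    ret = ops[0].matvec(x, y)
    y *= coeffs[0]
    for c, op in zip(coeffs[1:], ops[1:]):
        ret = min(ret, op.matvec(x, z))
        y += c * z
    return ret

ops = [DenseOp(np.eye(2)), DenseOp(2 * np.eye(2))]
x, y = np.ones(2), np.zeros(2)
summed_matvec(ops, np.array([1.0, 3.0]), x, y)  # y == [7., 7.], status 0
```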
2 changes: 1 addition & 1 deletion base/PyNucleus_base/utilsFem.py
@@ -646,7 +646,7 @@ def diff(self, d):
result[p.label] = (p.value, d[p.label])
elif isinstance(p.value, (int, INDEX, REAL, float)):
if not np.allclose(p.value, d[p.label],
rtol=rTol, atol=aTol):
rtol=rTol, atol=aTol) and not (np.isnan(p.value) and np.isnan(d[p.label])):
print(p.label, p.value, d[p.label], rTol, aTol, p.rTol, p.aTol)
result[p.label] = (p.value, d[p.label])
else:
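The diff() change above keeps two NaN values from being reported as a difference: np.allclose treats NaN as unequal to NaN unless equal_nan=True is passed. A small sketch of the resulting comparison (tolerance defaults here are placeholders, not the values used in utilsFem.py):

```python
import numpy as np

def values_differ(a, b, rTol=1e-5, aTol=1e-8):
    """Values count as different only if they are not close and not both NaN."""
    return not np.allclose(a, b, rtol=rTol, atol=aTol) and not (np.isnan(a) and np.isnan(b))

print(values_differ(np.nan, np.nan))   # False: both NaN, treated as equal
print(values_differ(1.0, 1.0 + 1e-9))  # False: within tolerance
print(values_differ(1.0, 2.0))         # True: genuinely different
```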
2 changes: 1 addition & 1 deletion compose.yaml
@@ -25,7 +25,7 @@ services:

# Launch with:
# docker compose up
# Then open localhost:8888 in your browser
# Then open localhost:8889 in your browser
pynucleus-jupyter:
image: ghcr.io/sandialabs/pynucleus:latest
build: .