diff --git a/.github/workflows/autorelease-default-env.sh b/.github/workflows/autorelease-default-env.sh new file mode 100644 index 00000000..74dd4615 --- /dev/null +++ b/.github/workflows/autorelease-default-env.sh @@ -0,0 +1,5 @@ +INSTALL_AUTORELEASE="python -m pip install autorelease==0.2.3" +if [ -f autorelease-env.sh ]; then + source autorelease-env.sh +fi + diff --git a/.github/workflows/autorelease-deploy.yml b/.github/workflows/autorelease-deploy.yml new file mode 100644 index 00000000..89aaeac3 --- /dev/null +++ b/.github/workflows/autorelease-deploy.yml @@ -0,0 +1,31 @@ +name: Autorelease +on: + release: + types: [published] + +jobs: + deploy_pypi: + runs-on: ubuntu-latest + name: "Deploy to PyPI" + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.x" + - run: | # TODO: move this to an action + source ./.github/workflows/autorelease-default-env.sh + cat autorelease-env.sh >> $GITHUB_ENV + eval $INSTALL_AUTORELEASE + name: "Install autorelease" + - run: | + python -m pip install twine wheel + name: "Install release tools" + - run: | + python setup.py sdist bdist_wheel + twine check dist/* + name: "Build and check package" + - uses: pypa/gh-action-pypi-publish@master + with: + password: ${{ secrets.pypi_password }} + name: "Deploy to PyPI" + diff --git a/.github/workflows/autorelease-gh-rel.yml b/.github/workflows/autorelease-gh-rel.yml new file mode 100644 index 00000000..f9e294eb --- /dev/null +++ b/.github/workflows/autorelease-gh-rel.yml @@ -0,0 +1,27 @@ +name: Autorelease +on: + push: + branches: + - stable + +jobs: + release-gh: + runs-on: ubuntu-latest + name: "Cut release" + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.7" + - run: | # TODO: move this to an action + source ./.github/workflows/autorelease-default-env.sh + cat autorelease-env.sh >> $GITHUB_ENV + eval $INSTALL_AUTORELEASE + name: "Install autorelease" + - run: | + VERSION=`python 
setup.py --version` + PROJECT=`python setup.py --name` + echo $PROJECT $VERSION + autorelease-release --project $PROJECT --version $VERSION --token $AUTORELEASE_TOKEN + env: + AUTORELEASE_TOKEN: ${{ secrets.AUTORELEASE_TOKEN }} diff --git a/.github/workflows/autorelease-prep.yml b/.github/workflows/autorelease-prep.yml new file mode 100644 index 00000000..48c82ba8 --- /dev/null +++ b/.github/workflows/autorelease-prep.yml @@ -0,0 +1,56 @@ +name: "Autorelease" +on: + pull_request: + branches: + - stable + +defaults: + run: + shell: bash + +jobs: + deploy_testpypi: + runs-on: ubuntu-latest + name: "Deployment test" + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.x" + - run: | # TODO: move this to an action + source ./.github/workflows/autorelease-default-env.sh + cat autorelease-env.sh >> $GITHUB_ENV + eval $INSTALL_AUTORELEASE + name: "Install autorelease" + - run: | + python -m pip install twine wheel + name: "Install release tools" + - run: | + bump-dev-version + python setup.py --version + name: "Bump testpypi dev version" + - run: | + python setup.py sdist bdist_wheel + twine check dist/* + name: "Build and check package" + - uses: pypa/gh-action-pypi-publish@master + with: + password: ${{ secrets.testpypi_password }} + repository_url: https://test.pypi.org/legacy/ + name: "Deploy to testpypi" + test_testpypi: + runs-on: ubuntu-latest + name: "Test deployed" + needs: deploy_testpypi + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.x" + - run: | # TODO: move this to an action + source ./.github/workflows/autorelease-default-env.sh + cat autorelease-env.sh >> $GITHUB_ENV + eval $INSTALL_AUTORELEASE + name: "Install autorelease" + - run: test-testpypi + diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml new file mode 100644 index 00000000..ce27c612 --- /dev/null +++ b/.github/workflows/test-suite.yml @@ -0,0 +1,64 @@ +name: 
"Tests" + +on: + pull_request: + branches: + - master + - stable + push: + branches: + - master + tags: + - "v*" + schedule: + - cron: "25 5 * * *" + +defaults: + run: + shell: bash -l {0} + +jobs: + test_suite: + runs-on: ubuntu-latest + name: "Unit tests" + strategy: + matrix: + CONDA_PY: + - 3.9 + - 3.8 + - 3.7 + - 3.6 + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: ${{ matrix.CONDA_PY }} + - name: "Install testing tools" + run: python -m pip install -r ./devtools/tests_require.txt + - name: "Install" + run: | + conda install pip + python -m pip install -e . + - name: "Versions" + run: conda list + - name: "Autorelease check" + env: + PR_BRANCH: ${{ github.event.pull_request.base.ref }} + REF: ${{ github.ref }} + EVENT: ${{ github.event_name }} + run: | + if [ "$EVENT" != "pull_request" ]; then + BRANCH=$REF + else + BRANCH=$PR_BRANCH + fi + python autorelease_check.py --branch $BRANCH --event ${EVENT} + - name: "Unit tests" + run: | + python -c "import paths_cli" + py.test -vv --cov --cov-report xml:cov.xml + - name: "Report coverage" + run: bash <(curl -s https://codecov.io/bash) diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 0c4ce6cb..00000000 --- a/.travis.yml +++ /dev/null @@ -1,51 +0,0 @@ -version: ~> 1.0 - -os: linux -language: python - -notifications: - webhooks: https://coveralls.io/webhook - -branches: - only: - - master - - stable - - docs - - /^v\d+(\.\d+)+/ - -env: - global: - - CANONICAL_PYTHON="3.8" - - CODECLIMATE="" - - PACKAGE_IMPORT_NAME="paths_cli" - - TWINE_USERNAME="dwhswenson" - # TWINE_PASSWORD - # AUTORELEASE_TOKEN - jobs: - - CONDA_PY=3.8 - - CONDA_PY=3.6 - - CONDA_PY=3.7 - -before_install: - - echo "before install" - - git fetch --tags - -install: - # we use conda to manage Python versions, but all the install is pip - - source ./devtools/miniconda_install.sh - - conda create --yes -n 
ops-cli-py${CONDA_PY} python=$CONDA_PY - - source activate ops-cli-py${CONDA_PY} - - pip install -e . - - pip install -r ./devtools/tests_require.txt - - pip list - -script: - - python -c "import paths_cli" - - python autorelease_check.py --branch ${TRAVIS_BRANCH} --event ${TRAVIS_EVENT_TYPE} #--allow-patch-skip # allow-patch-skip if there was a testpypi problem - - py.test -vv --cov --cov-report xml:cov.xml - -after_success: - - bash <(curl -s https://codecov.io/bash) - -import: - - dwhswenson/autorelease:autorelease-travis.yml@v0.2.1 diff --git a/autorelease-env.sh b/autorelease-env.sh new file mode 100644 index 00000000..3b488eb7 --- /dev/null +++ b/autorelease-env.sh @@ -0,0 +1,2 @@ +INSTALL_AUTORELEASE="python -m pip install autorelease==0.2.3 nose" +PACKAGE_IMPORT_NAME=paths_cli diff --git a/docs/api/index.rst b/docs/api/index.rst new file mode 100644 index 00000000..d1f329bf --- /dev/null +++ b/docs/api/index.rst @@ -0,0 +1,67 @@ +.. _api: + +API +=== + +.. currentmodule:: paths_cli + +CLI and Plugins +--------------- + +.. autosummary:: + :toctree: generated + + OpenPathSamplingCLI + plugin_management.CLIPluginLoader + plugin_management.FilePluginLoader + plugin_management.NamespacePluginLoader + + +Parameter Decorators +-------------------- + +These are the functions used to create the reusable parameter decorators. +Note that you will probably never need to use these; instead, use the +existing parameter decorators. + +.. autosummary:: + :toctree: generated + + param_core.Option + param_core.Argument + param_core.AbstractLoader + param_core.StorageLoader + param_core.OPSStorageLoadNames + param_core.OPSStorageLoadSingle + +Search strategies +----------------- + +These are the various strategies for finding objects in a storage, in +particular if we have to guess because the user didn't provide an explicit +choice or didn't tag. + +.. 
autosummary:: + :toctree: generated + + param_core.Getter + param_core.GetByName + param_core.GetByNumber + param_core.GetPredefinedName + param_core.GetOnly + param_core.GetOnlyNamed + param_core.GetOnlySnapshot + + +Commands +-------- + +.. autosummary:: + :toctree: generated + :recursive: + + commands.visit_all + commands.equilibrate + commands.pathsampling + commands.append + commands.contents diff --git a/docs/conf.py b/docs/conf.py index 027aee47..a5bc8235 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -41,9 +41,13 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', 'sphinx_click.ext', ] +autosummary_generate = True + # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] diff --git a/docs/for_core/README.md b/docs/for_core/README.md deleted file mode 100644 index ac4f4882..00000000 --- a/docs/for_core/README.md +++ /dev/null @@ -1 +0,0 @@ -This is stuff that really should go into the OPS core docs. diff --git a/docs/for_core/cli.rst b/docs/for_core/cli.rst deleted file mode 100644 index 10a44d93..00000000 --- a/docs/for_core/cli.rst +++ /dev/null @@ -1,174 +0,0 @@ -.. _cli: - -Command Line Interface -====================== - -A separate command line tool for OpenPathSamplng can be installed. It is -available via either ``conda`` (channel ``conda-forge``) or ``pip``, with -the package name ``openpathsampling-cli``. - -Once you install this, you'll have access to the command -``openpathsampling`` in your shell (although we recommend aliasing that to -either ``paths`` or ``ops`` -- save yourself some typing!) - -This command is a gateway to many subcommands, just like ``conda`` and -``pip`` (which have subcommands such as ``install``) or ``git`` (which has -subcommands such as ``clone`` or ``commit``). You can get a full listing all -the subcommands with ``openpathsampling --help``. 
For more information on -any given subcommand, use ``openpathsampling SUBCOMMAND --help``, replacing -``SUBCOMMAND`` with the subcommand you're interested in. - -Here, we will provide a description of a few of the subcommands that the CLI -tool provides. This documentation may not be fully up-to-date with the more -recent releases of the CLI, so use the CLI help tools to get a fuller -understanding of what is included. - -For more details on how the CLI interprets its arguments, and to learn how -to develop plugins for the CLI, see its documentation. The CLI subcommands -are defined through a plugin system, which makes it very easy for developers -to create new subcommands. - -* CLI documentation: https://openpathsampling-cli.readthedocs.io/ -* CLI code repository: https://github.com/openpathsampling/openpathsampling-cli/ - -Workflow with the CLI ---------------------- - -As always, the process of running a simulation is (1) set up the simulation; -(2) run the simulation; (3) analyze the simulation. The CLI is mainly -focused on step 2, although it also has tools that generally help with OPS -files. - -To use it, you'll want to first set up - - -Finding your way around the CLI -------------------------------- - -Like many command line tools, the OPS CLI has the options ``-h`` or -``--help`` to get help. If you run ``openpathsampling --help`` you should -see something like this:: - - Usage: openpathsampling [OPTIONS] COMMAND [ARGS]... - - OpenPathSampling is a Python library for path sampling simulations. This - command line tool facilitates common tasks when working with - OpenPathSampling. To use it, use one of the subcommands below. For - example, you can get more information about the pathsampling tool with: - - openpathsampling pathsampling --help - - Options: - --log PATH logging configuration file - -h, --help Show this message and exit. 
- - Simulation Commands: - visit-all Run MD to generate initial trajectories - equilibrate Run equilibration for path sampling - pathsampling Run any path sampling simulation, including TIS variants - - Miscellaneous Commands: - contents list named objects from an OPS .nc file - append add objects from INPUT_FILE to another file - -The ``--log`` option takes a logging configuration file (e.g., `logging.conf -<>`_, and sets that logging behavior. If you use it, it must come before the -subcommand name. - -You can find out more about each subcommand by putting ``--help`` *after* -the subcommand name, e.g., ``openpathsampling pathsampling --help``, which -returns:: - - Usage: openpathsampling pathsampling [OPTIONS] INPUT_FILE - - General path sampling, using setup in INPUT_FILE - - Options: - -o, --output-file PATH output ncfile [required] - -m, --scheme TEXT identifier for the move scheme - -t, --init-conds TEXT identifier for initial conditions (sample set or - trajectory) - -n, --nsteps INTEGER number of Monte Carlo trials to run - -h, --help Show this message and exit. - -Here you see the list of the options for the running a path sampling -simulation. In general, path sampling requires an output -file, a move scheme and initial conditions from some input file, and the -number of steps to run. Note that only the output file is technically -required: the CLI will default to running 0 steps (essentially, testing the -validity of your setup), and it can try to guess the move scheme and initial -conditions. In general, the way it guesses follows the following path: - -1. If there is only one object of the suitable type in the INPUT_FILE, use - that. -2. If there are multiple objects of the correct type, but only one has a - name, use the named object. -3. In special cases it looks for specific names, such as - ``initial_conditions``, and will use those. 
- -Full details on how various CLI parameters search the storage can be seen in -the `Parameter Interpretation -`_ -section of the CLI docs. - -Simulation Commands -------------------- - -One of the main concepts when working with the CLI is that you can create -all the OPS simulation objects without running the simulation, save them in -an OPS storage file, and then load them again to actually run your -simulation. For simulation commands, the options all deal with loading -simulation objects from storage. - -The simulation commands include ``equilibration``, ``pathsampling``, and -``simulation``. These commands aren'y necessarily mutually exclusive: you -can accomplish an equilibration phase with any of them. The ``simulation`` -command is the most general; if you've any :class:`.PathSimulator` object, -??? - -Here are some of the simulation commands implemented in the OPS CLI: - -* ``pathsampling``: run path sampling with a given move scheme (suitable for - custom TPS schemes as well as TIS/RETIS); must provide move scheme, - iniital conditions, and number of MC steps on command line -* ``simulation``: run arbitrary OPS simulator (including committor and - related); must provide a simulator object and number of steps on the - command line -* ``visit-all``: create initial trajectories by running MD until all states - have been visited (works for MSTIS or any 2-state system); must provide - states, engine, and initial snapshot on command line - -Miscellaneous Commands ----------------------- - -Even for users who prefer to develop their OPS projects entirely in Python, -foregoing the CLI tools to run simulations, some of the "miscellaneous" -commands are likely to be quite useful. Here are some that are available in -the CLI: - -* ``contents``: list all the named objects in an OPS storage, organized by - store (type); this is extremely useful to get the name of an object to use - - -.. 
* ``strip-snapshots``: create a copy of the input storage file with the - details (coordinates/velocities) of all snapshots removed; this allows you - to make a much smaller copy (with results of CVs) to copy back to a local - computer for analysis - -* ``append`` : add an object from once OPS storage into another one; this is - useful for getting everything into a single file before running a - simulation - -Customizing the CLI -------------------- - -The OPS CLI uses a flexible plugin system to enable users to easily add -custom functionality. This way, you can create and distribute custom -plugins, giving more functionality to other users who would benefit from it, -without adding everything to the core package and thus overwhelming new -users. - -Installing a plugin is easy: just create the directory -``$HOME/.openpathsampling/cli-plugins/``, and copy the plugin Python script -into there. For details on how to write a CLI plugin, see the `CLI -development docs `_. diff --git a/docs/index.rst b/docs/index.rst index fee0e7ac..cc120bd8 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -61,4 +61,5 @@ wrappers around well-tested OPS code. parameters workflows full_cli + api/index diff --git a/docs/plugins.rst b/docs/plugins.rst index 21ea9e25..9ebd928a 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -4,20 +4,15 @@ Plugin Infrastructure ===================== All subcommands to the OpenPathSampling CLI use a plugin infrastructure. -They simply need to be Python modules, following a few rules, that are -placed into the user's ``~/.openpathsampling/cli-plugins/`` directory. +There are two possible ways to distribute plugins (file plugins and +namespace plugins), but a given plugin script could be distributed either +way. 
-Technically, the code searches two directories for plugins: first, -``$DIRECTORY/commands``, where ``$DIRECTORY`` is the directory where the -main OPS CLI script has been installed (i.e., the directory that corresponds -to the Python package ``paths_cli``). This is where the default commands are -kept. Then it searches the user directory. Duplicate commands will lead to -errors when running the CLI, as you can't register the same name twice. +Writing a plugin script +----------------------- -Other than being in the right place, the script must do the following: +An OPS plugin is simply a Python module that follows a few rules. -* It must be possible to ``exec`` it in an empty namespace (mainly, this - can mean no relative imports). * It must define a variable ``CLI`` that is the main CLI function is assigned to. * It must define a variable ``SECTION`` to determine where to show it in @@ -27,11 +22,15 @@ Other than being in the right place, the script must do the following: ``openpathsampling --help``, but might still be usable. If your command doesn't show in the help, carefully check your spelling of the ``SECTION`` variable. +* The main CLI function must be decorated as a ``click.command``. +* (If distributed as a file plugin) It must be possible to ``exec`` it in an + empty namespace (mainly, this can mean no relative imports). As a suggestion, I (DWHS) tend to structure my plugins as follows: .. code:: python + @click.command("plugin", short_help="brief description") @PARAMETER.clicked(required) def plugin(parameter): plugin_main(PARAMETER.get(parameter)) @@ -40,20 +39,25 @@ As a suggestion, I (DWHS) tend to structure my plugins as follows: import openpathsampling as paths # do the real stuff with OPS ... + return final_status, simulation CLI = plugin SECTION = "MySection" The basic idea is that there's a ``plugin_main`` function that is based on pure OPS, using only inputs that OPS can immediately understand (no need to -go to storage, etc). 
This is easy to develop/test with OPS. Then there's a -wrapper function whose sole purpose is to convert the command line +process the command line). This is easy to develop/test with OPS. Then +there's a wrapper function whose sole purpose is to convert the command line parameters to something OPS can understand (using the ``get`` method). This wrapper is the ``CLI`` variable. Give it an allowed ``SECTION``, and the plugin is ready! The result is that plugins are astonishingly easy to develop, once you have -the scientific code implemented in a library. +the scientific code implemented in a library. This structure also makes it +very easy to test the plugins: a mock replaces the ``plugin_main`` in +``plugin`` to check that the integration works, and then a simple smoke test +for the ``plugin_main`` is sufficient, since the core code should already be +well-tested. Note that we recommend that the import of OpenPathSampling only be done inside the ``plugin_main`` function. Although this is contrary to normal @@ -61,4 +65,34 @@ Python practice, we do this because tools like tab-autocomplete require that you run the program each time. The import of OPS is rather slow, so we delay it until it is needed, keeping the CLI interface fast and responsive. -.. TODO : look into having the plugin auto-installed using setuptools +Finally, the ``plugin_main`` function returns some sort of final status and +the simulation object that was created (or ``None`` if there wasn't one). +This makes it very easy to chain multiple main functions to make a workflow. + + +Distributing file plugins +------------------------- + +Once you have a plugin module written, the easiest way to install it is to +put it in your ``~/.openpathsampling/cli-plugins/`` directory. This is the +file-based plugin distribution mechanism -- you send the file to someone, +and they put it in that directory. 
+ +This is great for plugins shared in a single team, or for creating +reproducible workflows that aren't intended for wide distribution. + + +Distributing namespace plugins +------------------------------ + +If the plugin is part of a larger Python package, or if it is important to +track version numbers or to be able to change which plugins are installed +in particular Python environments, the namespace distribution mechanism is a +better choice. We use `native namespace packages`_, which is a standard way +of making plugins in Python. Plugins should be in the ``paths_cli.plugins`` +namespace. + +.. _native namespace packages: + https://packaging.python.org/guides/packaging-namespace-packages/#native-namespace-packages + + diff --git a/docs/requirements.txt b/docs/requirements.txt index 4636c458..41f08b2a 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -3,3 +3,4 @@ numpy packaging sphinx-click +sphinx >= 3.1 # will this work? diff --git a/setup.cfg b/setup.cfg index e2fb4318..2ca6e44d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = openpathsampling-cli -version = 0.1.0 +version = 0.1.1 # version should end in .dev0 if this isn't to be released description = Command line tool for OpenPathSampling long_description = file: README.md