diff --git a/.circleci/config.yml b/.circleci/config.yml index 116bdc866..879df07f0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,7 +3,7 @@ version: 2 defaults: &defaults working_directory: /go/src/github.com/cosmos/cosmos-sdk docker: - - image: circleci/golang:1.10.3 + - image: circleci/golang:1.11.1 environment: GOBIN: /tmp/workspace/bin @@ -39,14 +39,6 @@ jobs: paths: - bin - profiles - - save_cache: - key: v1-pkg-cache - paths: - - /go/pkg - - save_cache: - key: v1-tree-{{ .Environment.CIRCLE_SHA1 }} - paths: - - /go/src/github.com/cosmos/cosmos-sdk lint: <<: *defaults @@ -54,14 +46,17 @@ jobs: steps: - attach_workspace: at: /tmp/workspace - - restore_cache: - key: v1-pkg-cache - - restore_cache: - key: v1-tree-{{ .Environment.CIRCLE_SHA1 }} + - checkout + - run: + name: dependencies + command: | + export PATH="$GOBIN:$PATH" + make get_vendor_deps - run: name: Get metalinter command: | export PATH="$GOBIN:$PATH" + make get_tools make get_dev_tools - run: name: Lint source @@ -69,21 +64,24 @@ jobs: export PATH="$GOBIN:$PATH" make test_lint - test_cli: + integration_tests: <<: *defaults parallelism: 1 steps: - attach_workspace: at: /tmp/workspace - - restore_cache: - key: v1-pkg-cache - - restore_cache: - key: v1-tree-{{ .Environment.CIRCLE_SHA1 }} + - checkout + - run: + name: dependencies + command: | + export PATH="$GOBIN:$PATH" + make get_vendor_deps - run: name: Test cli command: | export PATH="$GOBIN:$PATH" make test_cli + make test_examples test_sim_modules: <<: *defaults @@ -91,10 +89,12 @@ jobs: steps: - attach_workspace: at: /tmp/workspace - - restore_cache: - key: v1-pkg-cache - - restore_cache: - key: v1-tree-{{ .Environment.CIRCLE_SHA1 }} + - checkout + - run: + name: dependencies + command: | + export PATH="$GOBIN:$PATH" + make get_vendor_deps - run: name: Test individual module simulations command: | @@ -107,10 +107,12 @@ jobs: steps: - attach_workspace: at: /tmp/workspace - - restore_cache: - key: v1-pkg-cache - - restore_cache: - key: v1-tree-{{ .Environment.CIRCLE_SHA1 }} + - checkout + - run: + name: dependencies + command: | + export PATH="$GOBIN:$PATH" + make get_vendor_deps - run: name: Test individual module simulations command: | @@ -123,32 +125,55 @@ jobs: steps: - attach_workspace: at: /tmp/workspace - - restore_cache: - key: v1-pkg-cache - - restore_cache: - key: v1-tree-{{ .Environment.CIRCLE_SHA1 }} + - checkout + - run: + name: dependencies + command: | + export PATH="$GOBIN:$PATH" + make get_vendor_deps - run: name: Test full Gaia simulation command: | export PATH="$GOBIN:$PATH" make test_sim_gaia_fast + test_sim_gaia_multi_seed: + <<: *defaults + parallelism: 1 + steps: + - attach_workspace: + at: /tmp/workspace + - checkout + - run: + name: dependencies + command: | + export PATH="$GOBIN:$PATH" + make get_vendor_deps + - run: + name: Test multi-seed Gaia simulation + command: | + export PATH="$GOBIN:$PATH" + make test_sim_gaia_multi_seed + test_cover: <<: *defaults parallelism: 4 steps: - attach_workspace: at: /tmp/workspace - - restore_cache: - key: v1-pkg-cache - - restore_cache: - key: v1-tree-{{ .Environment.CIRCLE_SHA1 }} + - checkout + - run: + name: dependencies + command: | + export PATH="$GOBIN:$PATH" + make get_vendor_deps - run: mkdir -p /tmp/logs - run: name: Run tests command: | export PATH="$GOBIN:$PATH" make install + export VERSION="$(git describe --tags --long | sed 's/v\(.*\)/\1/')" for pkg in $(go list github.com/cosmos/cosmos-sdk/... 
| grep -v github.com/cosmos/cosmos-sdk/cmd/gaia/cli_test | grep -v '/simulation' | circleci tests split --split-by=timings); do id=$(basename "$pkg") GOCACHE=off go test -timeout 8m -race -coverprofile=/tmp/workspace/profiles/$id.out -covermode=atomic "$pkg" | tee "/tmp/logs/$id-$RANDOM.log" @@ -166,8 +191,12 @@ jobs: steps: - attach_workspace: at: /tmp/workspace - - restore_cache: - key: v1-tree-{{ .Environment.CIRCLE_SHA1 }} + - checkout + - run: + name: dependencies + command: | + export PATH="$GOBIN:$PATH" + make get_vendor_deps - run: name: gather command: | @@ -196,6 +225,12 @@ jobs: - run: name: run localnet and exit on failure command: | + pushd /tmp + wget https://dl.google.com/go/go1.11.linux-amd64.tar.gz + sudo tar -xvf go1.11.linux-amd64.tar.gz + sudo rm -rf /usr/local/go + sudo mv go /usr/local + popd set -x make get_tools make get_vendor_deps @@ -212,7 +247,7 @@ workflows: - lint: requires: - setup_dependencies - - test_cli: + - integration_tests: requires: - setup_dependencies - test_sim_modules: @@ -224,6 +259,9 @@ workflows: - test_sim_gaia_fast: requires: - setup_dependencies + - test_sim_gaia_multi_seed: + requires: + - setup_dependencies - test_cover: requires: - setup_dependencies diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index f7e2d0fd6..3a3d666e0 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -4,14 +4,17 @@ v Before smashing the submit button please review the checkboxes. v If a checkbox is n/a - please still include it but + a little note why ☺ > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > --> +- Targeted PR against correct branch (see [CONTRIBUTING.md](https://github.com/cosmos/cosmos-sdk/blob/develop/CONTRIBUTING.md#pr-targeting)) + - [ ] Linked to github-issue with discussion and accepted design OR link to spec that describes this work. -- [ ] Updated all relevant documentation (`docs/`) -- [ ] Updated all relevant code comments - [ ] Wrote tests -- [ ] Added entries in `PENDING.md` that include links to the relevant issue or PR that most accurately describes the change. -- [ ] Updated `cmd/gaia` and `examples/` -___________________________________ +- [ ] Updated relevant documentation (`docs/`) +- [ ] Added entries in `PENDING.md` with issue # +- [ ] rereviewed `Files changed` in the github PR explorer + +______ + For Admin Use: -- [ ] Added appropriate labels to PR (ex. wip, ready-for-review, docs) -- [ ] Reviewers Assigned -- [ ] Squashed all commits, uses message "Merge pull request #XYZ: [title]" ([coding standards](https://github.com/tendermint/coding/blob/master/README.md#merging-a-pr)) +- Added appropriate labels to PR (ex. 
wip, ready-for-review, docs)
+- Reviewers Assigned
+- Squashed all commits, uses message "Merge pull request #XYZ: [title]" ([coding standards](https://github.com/tendermint/coding/blob/master/README.md#merging-a-pr))
diff --git a/.gitignore b/.gitignore
index eb10faca7..cbc5e5e22 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,11 +2,15 @@
 .DS_Store
 *.swp
 *.swo
+*.swl
+*.swm
+*.swn
 .vscode
 .idea
 
 # Build
 vendor
+.vendor-new
 build
 tools/bin/*
 examples/build/*
@@ -16,6 +20,7 @@ docs/_build
 examples/basecoin/app/data
 baseapp/data/*
 client/lcd/keys/*
+client/lcd/statik/statik.go
 mytestnet
 
 # Testing
@@ -34,3 +39,8 @@ vagrant
 
 # Graphviz
 dependency-graph.png
+
+# Latex
+*.aux
+*.out
+*.synctex.gz
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 479f5913a..322142889 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,248 @@
 # Changelog
 
+## 0.25.0
+
+*October 24th, 2018*
+
+BREAKING CHANGES
+
+* Gaia REST API (`gaiacli advanced rest-server`)
+  * [x/stake] Validator.Owner renamed to Validator.Operator
+  * [\#595](https://github.com/cosmos/cosmos-sdk/issues/595) Connections to the REST server are now secured using Transport Layer Security by default. The --insecure flag is provided to switch back to insecure HTTP.
+  * [gaia-lite] [\#2258](https://github.com/cosmos/cosmos-sdk/issues/2258) Split `GET stake/delegators/{delegatorAddr}` into `GET stake/delegators/{delegatorAddr}/delegations`, `GET stake/delegators/{delegatorAddr}/unbonding_delegations` and `GET stake/delegators/{delegatorAddr}/redelegations`
+
+* Gaia CLI (`gaiacli`)
+  * [x/stake] Validator.Owner renamed to Validator.Operator
+  * [cli] unsafe_reset_all, show_validator, and show_node_id have been renamed to unsafe-reset-all, show-validator, and show-node-id
+  * [cli] [\#1983](https://github.com/cosmos/cosmos-sdk/issues/1983) --print-response now defaults to true in commands that create and send a transaction
+  * [cli] [\#1983](https://github.com/cosmos/cosmos-sdk/issues/1983) you can now pass --pubkey or --address to gaiacli keys show to return a plaintext representation of the key's address or public key for use with other commands
+  * [cli] [\#2061](https://github.com/cosmos/cosmos-sdk/issues/2061) changed proposalID in governance REST endpoints to proposal-id
+  * [cli] [\#2014](https://github.com/cosmos/cosmos-sdk/issues/2014) `gaiacli advanced` no longer exists - to access `ibc`, `rest-server`, and `validator-set` commands use `gaiacli ibc`, `gaiacli rest-server`, and `gaiacli tendermint`, respectively
+  * [makefile] `get_vendor_deps` no longer updates the lock file; it just updates the vendor directory. Use `update_vendor_deps` to update the lock file. [#2152](https://github.com/cosmos/cosmos-sdk/pull/2152)
+  * [cli] [\#2221](https://github.com/cosmos/cosmos-sdk/issues/2221) All commands that
+    utilize a validator's operator address must now use the new Bech32 prefix,
+    `cosmosvaloper`.
+  * [cli] [\#2190](https://github.com/cosmos/cosmos-sdk/issues/2190) `gaiacli init --gen-txs` is now `gaiacli init --with-txs` to reduce confusion
+  * [cli] [\#2073](https://github.com/cosmos/cosmos-sdk/issues/2073) --from can now be either an address or a key name
+  * [cli] [\#1184](https://github.com/cosmos/cosmos-sdk/issues/1184) Subcommands reorganisation, see [\#2390](https://github.com/cosmos/cosmos-sdk/pull/2390) for a comprehensive list of changes.
+  * [cli] [\#2524](https://github.com/cosmos/cosmos-sdk/issues/2524) Add support for offline mode to `gaiacli tx sign`. Lookups are not performed if the flag `--offline` is on.
+ * [cli] [\#2570](https://github.com/cosmos/cosmos-sdk/pull/2570) Add commands to query deposits on proposals + +* Gaia + * Make the transient store key use a distinct store key. [#2013](https://github.com/cosmos/cosmos-sdk/pull/2013) + * [x/stake] [\#1901](https://github.com/cosmos/cosmos-sdk/issues/1901) Validator type's Owner field renamed to Operator; Validator's GetOwner() renamed accordingly to comply with the SDK's Validator interface. + * [docs] [#2001](https://github.com/cosmos/cosmos-sdk/pull/2001) Update slashing spec for slashing period + * [x/stake, x/slashing] [#1305](https://github.com/cosmos/cosmos-sdk/issues/1305) - Rename "revoked" to "jailed" + * [x/stake] [#1676] Revoked and jailed validators put into the unbonding state + * [x/stake] [#1877] Redelegations/unbonding-delegation from unbonding validator have reduced time + * [x/slashing] [\#1789](https://github.com/cosmos/cosmos-sdk/issues/1789) Slashing changes for Tendermint validator set offset (NextValSet) + * [x/stake] [\#2040](https://github.com/cosmos/cosmos-sdk/issues/2040) Validator + operator type has now changed to `sdk.ValAddress` + * [x/stake] [\#2221](https://github.com/cosmos/cosmos-sdk/issues/2221) New + Bech32 prefixes have been introduced for a validator's consensus address and + public key: `cosmosvalcons` and `cosmosvalconspub` respectively. Also, existing Bech32 prefixes have been + renamed for accounts and validator operators: + * `cosmosaccaddr` / `cosmosaccpub` => `cosmos` / `cosmospub` + * `cosmosvaladdr` / `cosmosvalpub` => `cosmosvaloper` / `cosmosvaloperpub` + * [x/stake] [#1013] TendermintUpdates now uses transient store + * [x/stake] [\#2435](https://github.com/cosmos/cosmos-sdk/issues/2435) Remove empty bytes from the ValidatorPowerRank store key + * [x/gov] [#2195] Governance uses BFT Time + * [x/gov] [\#2256](https://github.com/cosmos/cosmos-sdk/issues/2256) Removed slashing for governance non-voting validators + * [simulation] [\#2162](https://github.com/cosmos/cosmos-sdk/issues/2162) Added back correct supply invariants + * [x/slashing] [\#2430](https://github.com/cosmos/cosmos-sdk/issues/2430) Simulate more slashes, check if validator is jailed before jailing + * [x/stake] [\#2393](https://github.com/cosmos/cosmos-sdk/issues/2393) Removed `CompleteUnbonding` and `CompleteRedelegation` Msg types, and instead added unbonding/redelegation queues to endblocker + * [x/mock/simulation] [\#2501](https://github.com/cosmos/cosmos-sdk/issues/2501) Simulate transactions & invariants for fee distribution, and fix bugs discovered in the process + * [x/auth] Simulate random fee payments + * [cmd/gaia/app] Simulate non-zero inflation + * [x/stake] Call hooks correctly in several cases related to delegation/validator updates + * [x/stake] Check full supply invariants, including yet-to-be-withdrawn fees + * [x/stake] Remove no-longer-in-use store key + * [x/slashing] Call hooks correctly when a validator is slashed + * [x/slashing] Truncate withdrawals (unbonding, redelegation) and burn change + * [x/mock/simulation] Ensure the simulation cannot set a proposer address of nil + * [x/mock/simulation] Add more event logs on begin block / end block for clarity + * [x/mock/simulation] Correctly set validator power in abci.RequestBeginBlock + * [x/minting] Correctly call stake keeper to track inflated supply + * [x/distribution] Sanity check for nonexistent rewards + * [x/distribution] Truncate withdrawals and return change to the community pool + * [x/distribution] Add sanity checks for incorrect accum / 
total accum relations
+  * [x/distribution] Correctly calculate total power using Tendermint updates
+  * [x/distribution] Simulate withdrawal transactions
+  * [x/distribution] Fix a bug where the fee pool was not correctly tracked on WithdrawDelegatorRewardsAll
+  * [x/stake] [\#1673](https://github.com/cosmos/cosmos-sdk/issues/1673) Validators are no longer deleted until they can no longer possibly be slashed
+  * [\#1890](https://github.com/cosmos/cosmos-sdk/issues/1890) Start chain with initial state + sequence of transactions
+  * [cli] Rename `gaiad init gentx` to `gaiad gentx`.
+  * [cli] Add `--skip-genesis` flag to `gaiad init` to prevent `genesis.json` generation.
+  * Drop `GenesisTx` in favor of a signed `StdTx` with only one `MsgCreateValidator` message.
+  * [cli] Port `gaiad init` and `gaiad testnet` to work with `StdTx` genesis transactions.
+  * [cli] Add `--moniker` flag to `gaiad init` to override moniker when generating `genesis.json` - i.e. it takes effect when running with the `--with-txs` flag, it is ignored otherwise.
+
+* SDK
+  * [core] [\#2219](https://github.com/cosmos/cosmos-sdk/issues/2219) Update to Tendermint 0.24.0
+    * Validator set updates delayed by one block
+    * BFT timestamp that can safely be used by applications
+    * Fixed maximum block size enforcement
+  * [core] [\#1807](https://github.com/cosmos/cosmos-sdk/issues/1807) Switch from use of rational to decimal
+  * [types] [\#1901](https://github.com/cosmos/cosmos-sdk/issues/1901) Validator interface's GetOwner() renamed to GetOperator()
+  * [x/slashing] [#2122](https://github.com/cosmos/cosmos-sdk/pull/2122) - Implement slashing period
+  * [types] [\#2119](https://github.com/cosmos/cosmos-sdk/issues/2119) Parsed error messages and ABCI log errors to make them more human readable.
+  * [types] [\#2407](https://github.com/cosmos/cosmos-sdk/issues/2407) MulInt method added to big decimal in order to improve efficiency of slashing
+  * [simulation] Rename TestAndRunTx to Operation [#2153](https://github.com/cosmos/cosmos-sdk/pull/2153)
+  * [simulation] Remove log and testing.TB from Operation and Invariants, in favor of using errors [\#2282](https://github.com/cosmos/cosmos-sdk/issues/2282)
+  * [simulation] Remove usage of keys and addrs in the types, in favor of simulation.Account [\#2384](https://github.com/cosmos/cosmos-sdk/issues/2384)
+  * [tools] Removed gocyclo [#2211](https://github.com/cosmos/cosmos-sdk/issues/2211)
+  * [baseapp] Remove `SetTxDecoder` in favor of requiring the decoder be set in baseapp initialization. [#1441](https://github.com/cosmos/cosmos-sdk/issues/1441)
+  * [baseapp] [\#1921](https://github.com/cosmos/cosmos-sdk/issues/1921) Add minimumFees field to BaseApp.
+  * [store] Change storeInfo within the root multistore to use tmhash instead of ripemd160 [\#2308](https://github.com/cosmos/cosmos-sdk/issues/2308)
+  * [codec] [\#2324](https://github.com/cosmos/cosmos-sdk/issues/2324) All references to wire have been renamed to codec. Additionally, wire.NewCodec is now codec.New().
+  * [types] [\#2343](https://github.com/cosmos/cosmos-sdk/issues/2343) Make sdk.Msg have a names field, to facilitate automatic tagging.
+ * [baseapp] [\#2366](https://github.com/cosmos/cosmos-sdk/issues/2366) Automatically add action tags to all messages + * [x/auth] [\#2377](https://github.com/cosmos/cosmos-sdk/issues/2377) auth.StdSignMsg -> txbuilder.StdSignMsg + * [x/staking] [\#2244](https://github.com/cosmos/cosmos-sdk/issues/2244) staking now holds a consensus-address-index instead of a consensus-pubkey-index + * [x/staking] [\#2236](https://github.com/cosmos/cosmos-sdk/issues/2236) more distribution hooks for distribution + * [x/stake] [\#2394](https://github.com/cosmos/cosmos-sdk/issues/2394) Split up UpdateValidator into distinct state transitions applied only in EndBlock + * [x/slashing] [\#2480](https://github.com/cosmos/cosmos-sdk/issues/2480) Fix signing info handling bugs & faulty slashing + * [x/stake] [\#2412](https://github.com/cosmos/cosmos-sdk/issues/2412) Added an unbonding validator queue to EndBlock to automatically update validator.Status when finished Unbonding + * [x/stake] [\#2500](https://github.com/cosmos/cosmos-sdk/issues/2500) Block conflicting redelegations until we add an index + * [x/params] Global Paramstore refactored + * [types] [\#2506](https://github.com/cosmos/cosmos-sdk/issues/2506) sdk.Dec MarshalJSON now marshals as a normal Decimal, with 10 digits of decimal precision + * [x/stake] [\#2508](https://github.com/cosmos/cosmos-sdk/issues/2508) Utilize Tendermint power for validator power key + * [x/stake] [\#2531](https://github.com/cosmos/cosmos-sdk/issues/2531) Remove all inflation logic + * [x/mint] [\#2531](https://github.com/cosmos/cosmos-sdk/issues/2531) Add minting module and inflation logic + * [x/auth] [\#2540](https://github.com/cosmos/cosmos-sdk/issues/2540) Rename `AccountMapper` to `AccountKeeper`. + * [types] [\#2456](https://github.com/cosmos/cosmos-sdk/issues/2456) Renamed msg.Name() and msg.Type() to msg.Type() and msg.Route() respectively + +* Tendermint + * Update tendermint version from v0.23.0 to v0.25.0, notable changes + * Mempool now won't build too large blocks, or too computationally expensive blocks + * Maximum tx sizes and gas are now removed, and are implicitly the blocks maximums + * ABCI validators no longer send the pubkey. The pubkey is only sent in validator updates + * Validator set changes are now delayed by one block + * Block header now includes the next validator sets hash + * BFT time is implemented + * Secp256k1 signature format has changed + * There is now a threshold multisig format + * See the [tendermint changelog](https://github.com/tendermint/tendermint/blob/master/CHANGELOG.md) for other changes. + +FEATURES + +* Gaia REST API (`gaiacli advanced rest-server`) + * [gaia-lite] Endpoints to query staking pool and params + * [gaia-lite] [\#2110](https://github.com/cosmos/cosmos-sdk/issues/2110) Add support for `simulate=true` requests query argument to endpoints that send txs to run simulations of transactions + * [gaia-lite] [\#966](https://github.com/cosmos/cosmos-sdk/issues/966) Add support for `generate_only=true` query argument to generate offline unsigned transactions + * [gaia-lite] [\#1953](https://github.com/cosmos/cosmos-sdk/issues/1953) Add /sign endpoint to sign transactions generated with `generate_only=true`. + * [gaia-lite] [\#1954](https://github.com/cosmos/cosmos-sdk/issues/1954) Add /broadcast endpoint to broadcast transactions signed by the /sign endpoint. 
+ * [gaia-lite] [\#2113](https://github.com/cosmos/cosmos-sdk/issues/2113) Rename `/accounts/{address}/send` to `/bank/accounts/{address}/transfers`, rename `/accounts/{address}` to `/auth/accounts/{address}`, replace `proposal-id` with `proposalId` in all gov endpoints + * [gaia-lite] [\#2478](https://github.com/cosmos/cosmos-sdk/issues/2478) Add query gov proposal's deposits endpoint + * [gaia-lite] [\#2477](https://github.com/cosmos/cosmos-sdk/issues/2477) Add query validator's outgoing redelegations and unbonding delegations endpoints + +* Gaia CLI (`gaiacli`) + * [cli] Cmds to query staking pool and params + * [gov][cli] #2062 added `--proposal` flag to `submit-proposal` that allows a JSON file containing a proposal to be passed in + * [\#2040](https://github.com/cosmos/cosmos-sdk/issues/2040) Add `--bech` to `gaiacli keys show` and respective REST endpoint to + provide desired Bech32 prefix encoding + * [cli] [\#2047](https://github.com/cosmos/cosmos-sdk/issues/2047) [\#2306](https://github.com/cosmos/cosmos-sdk/pull/2306) Passing --gas=simulate triggers a simulation of the tx before the actual execution. + The gas estimate obtained via the simulation will be used as gas limit in the actual execution. + * [cli] [\#2047](https://github.com/cosmos/cosmos-sdk/issues/2047) The --gas-adjustment flag can be used to adjust the estimate obtained via the simulation triggered by --gas=simulate. + * [cli] [\#2110](https://github.com/cosmos/cosmos-sdk/issues/2110) Add --dry-run flag to perform a simulation of a transaction without broadcasting it. The --gas flag is ignored as gas would be automatically estimated. + * [cli] [\#2204](https://github.com/cosmos/cosmos-sdk/issues/2204) Support generating and broadcasting messages with multiple signatures via command line: + * [\#966](https://github.com/cosmos/cosmos-sdk/issues/966) Add --generate-only flag to build an unsigned transaction and write it to STDOUT. + * [\#1953](https://github.com/cosmos/cosmos-sdk/issues/1953) New `sign` command to sign transactions generated with the --generate-only flag. + * [\#1954](https://github.com/cosmos/cosmos-sdk/issues/1954) New `broadcast` command to broadcast transactions generated offline and signed with the `sign` command. + * [cli] [\#2220](https://github.com/cosmos/cosmos-sdk/issues/2220) Add `gaiacli config` feature to interactively create CLI config files to reduce the number of required flags + * [stake][cli] [\#1672](https://github.com/cosmos/cosmos-sdk/issues/1672) Introduced + new commission flags for validator commands `create-validator` and `edit-validator`. + * [stake][cli] [\#1890](https://github.com/cosmos/cosmos-sdk/issues/1890) Add `--genesis-format` flag to `gaiacli tx create-validator` to produce transactions in genesis-friendly format. + * [cli][\#2554](https://github.com/cosmos/cosmos-sdk/issues/2554) Make `gaiacli keys show` multisig ready. + +* Gaia + * [cli] #2170 added ability to show the node's address via `gaiad tendermint show-address` + * [simulation] #2313 Reworked `make test_sim_gaia_slow` to `make test_sim_gaia_full`, now simulates from multiple starting seeds in parallel + * [cli] [\#1921] (https://github.com/cosmos/cosmos-sdk/issues/1921) + * New configuration file `gaiad.toml` is now created to host Gaia-specific configuration. + * New --minimum_fees/minimum_fees flag/config option to set a minimum fee. 
+
+* SDK
+  * [querier] added custom querier functionality, so ABCI query requests can be handled by keepers
+  * [simulation] [\#1924](https://github.com/cosmos/cosmos-sdk/issues/1924) allow operations to specify future operations
+  * [simulation] [\#1924](https://github.com/cosmos/cosmos-sdk/issues/1924) Add benchmarking capabilities, with makefile commands "test_sim_gaia_benchmark, test_sim_gaia_profile"
+  * [simulation] [\#2349](https://github.com/cosmos/cosmos-sdk/issues/2349) Add time-based future scheduled operations to simulator
+  * [x/auth] [\#2376](https://github.com/cosmos/cosmos-sdk/issues/2376) Remove FeePayer() from StdTx
+  * [x/stake] [\#1672](https://github.com/cosmos/cosmos-sdk/issues/1672) Implement
+    basis for the validator commission model.
+  * [x/auth] Support account removal in the account mapper.
+
+
+IMPROVEMENTS
+* [tools] Improved terraform and ansible scripts for infrastructure deployment
+* [tools] Added ansible script to enable process core dumps
+
+* Gaia REST API (`gaiacli advanced rest-server`)
+  * [x/stake] [\#2000](https://github.com/cosmos/cosmos-sdk/issues/2000) Added tests for new staking endpoints
+  * [gaia-lite] [\#2445](https://github.com/cosmos/cosmos-sdk/issues/2445) Standardized REST error responses
+  * [gaia-lite] Added example to Swagger specification for /keys/seed.
+  * [x/stake] Refactor REST utils
+
+* Gaia CLI (`gaiacli`)
+  * [cli] #2060 removed `--select` from `block` command
+  * [cli] #2128 fixed segfault when exporting directly after `gaiad init`
+  * [cli] [\#1255](https://github.com/cosmos/cosmos-sdk/issues/1255) open KeyBase in read-only mode
+    for query-purpose CLI commands
+
+* Gaia
+  * [x/stake] [#2023](https://github.com/cosmos/cosmos-sdk/pull/2023) Terminate iteration loop in `UpdateBondedValidators` and `UpdateBondedValidatorsFull` when the first revoked validator is encountered and perform a sanity check.
+  * [x/auth] Signature verification's gas cost now accounts for pubkey type. [#2046](https://github.com/tendermint/tendermint/pull/2046)
+  * [x/stake] [x/slashing] Ensure delegation invariants to jailed validators [#1883](https://github.com/cosmos/cosmos-sdk/issues/1883).
+  * [x/stake] Improve speed of GetValidator, which was shown to be a performance bottleneck. [#2046](https://github.com/tendermint/tendermint/pull/2200)
+  * [x/stake] [\#2435](https://github.com/cosmos/cosmos-sdk/issues/2435) Improve memory efficiency of getting the various store keys
+  * [genesis] [\#2229](https://github.com/cosmos/cosmos-sdk/issues/2229) Ensure that there are no duplicate accounts or validators in the genesis state.
+  * [genesis] [\#2450](https://github.com/cosmos/cosmos-sdk/issues/2450) Validate staking genesis parameters.
+  * Add SDK validation to `config.toml` (namely disabling `create_empty_blocks`) [\#1571](https://github.com/cosmos/cosmos-sdk/issues/1571)
+  * [\#1941](https://github.com/cosmos/cosmos-sdk/issues/1941) Version is now inferred via `git describe --tags`.
+  * [x/distribution] [\#1671](https://github.com/cosmos/cosmos-sdk/issues/1671) add distribution types and tests
+
+* SDK
+  * [tools] `make get_vendor_deps` deletes `.vendor-new` directories, in case scratch files are present.
+  * [spec] Added simple piggy bank distribution spec
+  * [cli] [\#1632](https://github.com/cosmos/cosmos-sdk/issues/1632) Add integration tests to ensure `basecoind init && basecoind` start sequences run successfully for both `democoin` and `basecoin` examples.
+ * [store] Speedup IAVL iteration, and consequently everything that requires IAVL iteration. [#2143](https://github.com/cosmos/cosmos-sdk/issues/2143) + * [store] [\#1952](https://github.com/cosmos/cosmos-sdk/issues/1952), [\#2281](https://github.com/cosmos/cosmos-sdk/issues/2281) Update IAVL dependency to v0.11.0 + * [simulation] Make timestamps randomized [#2153](https://github.com/cosmos/cosmos-sdk/pull/2153) + * [simulation] Make logs not just pure strings, speeding it up by a large factor at greater block heights [\#2282](https://github.com/cosmos/cosmos-sdk/issues/2282) + * [simulation] Add a concept of weighting the operations [\#2303](https://github.com/cosmos/cosmos-sdk/issues/2303) + * [simulation] Logs get written to file if large, and also get printed on panics [\#2285](https://github.com/cosmos/cosmos-sdk/issues/2285) + * [simulation] Bank simulations now makes testing auth configurable [\#2425](https://github.com/cosmos/cosmos-sdk/issues/2425) + * [gaiad] [\#1992](https://github.com/cosmos/cosmos-sdk/issues/1992) Add optional flag to `gaiad testnet` to make config directory of daemon (default `gaiad`) and cli (default `gaiacli`) configurable + * [x/stake] Add stake `Queriers` for Gaia-lite endpoints. This increases the staking endpoints performance by reusing the staking `keeper` logic for queries. [#2249](https://github.com/cosmos/cosmos-sdk/pull/2149) + * [store] [\#2017](https://github.com/cosmos/cosmos-sdk/issues/2017) Refactor + gas iterator gas consumption to only consume gas for iterator creation and `Next` + calls which includes dynamic consumption of value length. + * [types/decimal] [\#2378](https://github.com/cosmos/cosmos-sdk/issues/2378) - Added truncate functionality to decimal + * [client] [\#1184](https://github.com/cosmos/cosmos-sdk/issues/1184) Remove unused `client/tx/sign.go`. + * [tools] [\#2464](https://github.com/cosmos/cosmos-sdk/issues/2464) Lock binary dependencies to a specific version + +BUG FIXES + +* Gaia CLI (`gaiacli`) + * [cli] [\#1997](https://github.com/cosmos/cosmos-sdk/issues/1997) Handle panics gracefully when `gaiacli stake {delegation,unbond}` fail to unmarshal delegation. + * [cli] [\#2265](https://github.com/cosmos/cosmos-sdk/issues/2265) Fix JSON formatting of the `gaiacli send` command. + * [cli] [\#2547](https://github.com/cosmos/cosmos-sdk/issues/2547) Mark --to and --amount as required flags for `gaiacli tx send`. + +* Gaia + * [x/stake] Return correct Tendermint validator update set on `EndBlocker` by not + including non previously bonded validators that have zero power. [#2189](https://github.com/cosmos/cosmos-sdk/issues/2189) + +* SDK + * [\#1988](https://github.com/cosmos/cosmos-sdk/issues/1988) Make us compile on OpenBSD (disable ledger) [#1988] (https://github.com/cosmos/cosmos-sdk/issues/1988) + * [\#2105](https://github.com/cosmos/cosmos-sdk/issues/2105) Fix DB Iterator leak, which may leak a go routine. + * [ledger] [\#2064](https://github.com/cosmos/cosmos-sdk/issues/2064) Fix inability to sign and send transactions via the LCD by + loading a Ledger device at runtime. + * [\#2158](https://github.com/cosmos/cosmos-sdk/issues/2158) Fix non-deterministic ordering of validator iteration when slashing in `gov EndBlocker` + * [simulation] [\#1924](https://github.com/cosmos/cosmos-sdk/issues/1924) Make simulation stop on SIGTERM + * [\#2388](https://github.com/cosmos/cosmos-sdk/issues/2388) Remove dependency on deprecated tendermint/tmlibs repository. 
+ * [\#2416](https://github.com/cosmos/cosmos-sdk/issues/2416) Refactored + `InitializeTestLCD` to properly include proposing validator in genesis state. + ## 0.24.2 *August 22nd, 2018* @@ -7,7 +250,7 @@ BUG FIXES * Tendermint - - Fix unbounded consensus WAL growth + - Fix unbounded consensus WAL growth ## 0.24.1 @@ -25,36 +268,36 @@ BUG FIXES BREAKING CHANGES * Gaia REST API (`gaiacli advanced rest-server`) - - [x/stake] \#1880 More REST-ful endpoints (large refactor) - - [x/slashing] \#1866 `/slashing/signing_info` takes cosmosvalpub instead of cosmosvaladdr + - [x/stake] [\#1880](https://github.com/cosmos/cosmos-sdk/issues/1880) More REST-ful endpoints (large refactor) + - [x/slashing] [\#1866](https://github.com/cosmos/cosmos-sdk/issues/1866) `/slashing/signing_info` takes cosmosvalpub instead of cosmosvaladdr - use time.Time instead of int64 for time. See Tendermint v0.23.0 - Signatures are no longer Amino encoded with prefixes (just encoded as raw bytes) - see Tendermint v0.23.0 * Gaia CLI (`gaiacli`) - [x/stake] change `--keybase-sig` to `--identity` - - [x/stake] \#1828 Force user to specify amount on create-validator command by removing default + - [x/stake] [\#1828](https://github.com/cosmos/cosmos-sdk/issues/1828) Force user to specify amount on create-validator command by removing default - [x/gov] Change `--proposalID` to `--proposal-id` - - [x/stake, x/gov] \#1606 Use `--from` instead of adhoc flags like `--address-validator` + - [x/stake, x/gov] [\#1606](https://github.com/cosmos/cosmos-sdk/issues/1606) Use `--from` instead of adhoc flags like `--address-validator` and `--proposer` to indicate the sender address. - - \#1551 Remove `--name` completely + - [\#1551](https://github.com/cosmos/cosmos-sdk/issues/1551) Remove `--name` completely - Genesis/key creation (`gaiad init`) now supports user-provided key passwords * Gaia - [x/stake] Inflation doesn't use rationals in calculation (performance boost) - [x/stake] Persist a map from `addr->pubkey` in the state since BeginBlock doesn't provide pubkeys. 
- - [x/gov] \#1781 Added tags sub-package, changed tags to use dash-case - - [x/gov] \#1688 Governance parameters are now stored in globalparams store - - [x/gov] \#1859 Slash validators who do not vote on a proposal - - [x/gov] \#1914 added TallyResult type that gets stored in Proposal after tallying is finished + - [x/gov] [\#1781](https://github.com/cosmos/cosmos-sdk/issues/1781) Added tags sub-package, changed tags to use dash-case + - [x/gov] [\#1688](https://github.com/cosmos/cosmos-sdk/issues/1688) Governance parameters are now stored in globalparams store + - [x/gov] [\#1859](https://github.com/cosmos/cosmos-sdk/issues/1859) Slash validators who do not vote on a proposal + - [x/gov] [\#1914](https://github.com/cosmos/cosmos-sdk/issues/1914) added TallyResult type that gets stored in Proposal after tallying is finished * SDK - [baseapp] Msgs are no longer run on CheckTx, removed `ctx.IsCheckTx()` - [baseapp] NewBaseApp constructor takes sdk.TxDecoder as argument instead of wire.Codec - [types] sdk.NewCoin takes sdk.Int, sdk.NewInt64Coin takes int64 - [x/auth] Default TxDecoder can be found in `x/auth` rather than baseapp - - [client] \#1551: Refactored `CoreContext` to `TxContext` and `QueryContext` + - [client] [\#1551](https://github.com/cosmos/cosmos-sdk/issues/1551): Refactored `CoreContext` to `TxContext` and `QueryContext` - Removed all tx related fields and logic (building & signing) to separate structure `TxContext` in `x/auth/client/context` @@ -74,7 +317,7 @@ FEATURES * Gaia CLI (`gaiacli`) - [x/gov] added `query-proposals` command. Can filter by `depositer`, `voter`, and `status` - - [x/stake] \#2043 Added staking query cli cmds for unbonding-delegations and redelegations + - [x/stake] [\#2043](https://github.com/cosmos/cosmos-sdk/issues/2043) Added staking query cli cmds for unbonding-delegations and redelegations * Gaia - [networks] Added ansible scripts to upgrade seed nodes on a network @@ -87,7 +330,7 @@ FEATURES - Simulates Tendermint's algorithm for validator set updates - Simulates validator signing/downtime with a Markov chain, and occaisional double-signatures - Includes simulated operations & invariants for staking, slashing, governance, and bank modules - - [store] \#1481 Add transient store + - [store] [\#1481](https://github.com/cosmos/cosmos-sdk/issues/1481) Add transient store - [baseapp] Initialize validator set on ResponseInitChain - [baseapp] added BaseApp.Seal - ability to seal baseapp parameters once they've been set - [cosmos-sdk-cli] New `cosmos-sdk-cli` tool to quickly initialize a new @@ -97,39 +340,41 @@ FEATURES IMPROVEMENTS * Gaia - - [spec] \#967 Inflation and distribution specs drastically improved - - [x/gov] \#1773 Votes on a proposal can now be queried + - [spec] [\#967](https://github.com/cosmos/cosmos-sdk/issues/967) Inflation and distribution specs drastically improved + - [x/gov] [\#1773](https://github.com/cosmos/cosmos-sdk/issues/1773) Votes on a proposal can now be queried - [x/gov] Initial governance parameters can now be set in the genesis file - - [x/stake] \#1815 Sped up the processing of `EditValidator` txs. - - [config] \#1930 Transactions indexer indexes all tags by default. + - [x/stake] [\#1815](https://github.com/cosmos/cosmos-sdk/issues/1815) Sped up the processing of `EditValidator` txs. + - [config] [\#1930](https://github.com/cosmos/cosmos-sdk/issues/1930) Transactions indexer indexes all tags by default. 
- [ci] [#2057](https://github.com/cosmos/cosmos-sdk/pull/2057) Run `make localnet-start` on every commit and ensure network reaches at least 10 blocks * SDK - - [baseapp] \#1587 Allow any alphanumeric character in route + - [baseapp] [\#1587](https://github.com/cosmos/cosmos-sdk/issues/1587) Allow any alphanumeric character in route - [baseapp] Allow any alphanumeric character in route - [tools] Remove `rm -rf vendor/` from `make get_vendor_deps` - [x/auth] Recover ErrorOutOfGas panic in order to set sdk.Result attributes correctly + - [x/auth] [\#2376](https://github.com/cosmos/cosmos-sdk/issues/2376) No longer runs any signature in a multi-msg, if any account/sequence number is wrong. + - [x/auth] [\#2376](https://github.com/cosmos/cosmos-sdk/issues/2376) No longer charge gas for subtracting fees - [x/bank] Unit tests are now table-driven - [tests] Add tests to example apps in docs - [tests] Fixes ansible scripts to work with AWS too - - [tests] \#1806 CLI tests are now behind the build flag 'cli_test', so go test works on a new repo + - [tests] [\#1806](https://github.com/cosmos/cosmos-sdk/issues/1806) CLI tests are now behind the build flag 'cli_test', so go test works on a new repo BUG FIXES * Gaia CLI (`gaiacli`) - - \#1766 Fixes bad example for keybase identity - - [x/stake] \#2021 Fixed repeated CLI commands in staking + - [\#1766](https://github.com/cosmos/cosmos-sdk/issues/1766) Fixes bad example for keybase identity + - [x/stake] [\#2021](https://github.com/cosmos/cosmos-sdk/issues/2021) Fixed repeated CLI commands in staking * Gaia - [x/stake] [#2077](https://github.com/cosmos/cosmos-sdk/pull/2077) Fixed invalid cliff power comparison - - \#1804 Fixes gen-tx genesis generation logic temporarily until upstream updates - - \#1799 Fix `gaiad export` - - \#1839 Fixed bug where intra-tx counter wasn't set correctly for genesis validators - - [x/stake] \#1858 Fixed bug where the cliff validator was not updated correctly - - [tests] \#1675 Fix non-deterministic `test_cover` - - [tests] \#1551 Fixed invalid LCD test JSON payload in `doIBCTransfer` + - [\#1804](https://github.com/cosmos/cosmos-sdk/issues/1804) Fixes gen-tx genesis generation logic temporarily until upstream updates + - [\#1799](https://github.com/cosmos/cosmos-sdk/issues/1799) Fix `gaiad export` + - [\#1839](https://github.com/cosmos/cosmos-sdk/issues/1839) Fixed bug where intra-tx counter wasn't set correctly for genesis validators + - [x/stake] [\#1858](https://github.com/cosmos/cosmos-sdk/issues/1858) Fixed bug where the cliff validator was not updated correctly + - [tests] [\#1675](https://github.com/cosmos/cosmos-sdk/issues/1675) Fix non-deterministic `test_cover` + - [tests] [\#1551](https://github.com/cosmos/cosmos-sdk/issues/1551) Fixed invalid LCD test JSON payload in `doIBCTransfer` - [basecoin] Fixes coin transaction failure and account query [discussion](https://forum.cosmos.network/t/unmarshalbinarybare-expected-to-read-prefix-bytes-75fbfab8-since-it-is-registered-concrete-but-got-0a141dfa/664/6) - - [x/gov] \#1757 Fix VoteOption conversion to String + - [x/gov] [\#1757](https://github.com/cosmos/cosmos-sdk/issues/1757) Fix VoteOption conversion to String * [x/stake] [#2083] Fix broken invariant of bonded validator power decrease ## 0.23.1 @@ -157,9 +402,9 @@ IMPROVEMENTS BUG FIXES * [tendermint] Update to v0.22.6 - Fixes some security vulnerabilities reported in the [Bug Bounty](https://hackerone.com/tendermint) -* \#1797 Fix off-by-one error in slashing for downtime -* \#1787 Fixed bug where Tally fails 
due to revoked/unbonding validator -* \#1666 Add intra-tx counter to the genesis validators +* [\#1797](https://github.com/cosmos/cosmos-sdk/issues/1797) Fix off-by-one error in slashing for downtime +* [\#1787](https://github.com/cosmos/cosmos-sdk/issues/1787) Fixed bug where Tally fails due to revoked/unbonding validator +* [\#1666](https://github.com/cosmos/cosmos-sdk/issues/1666) Add intra-tx counter to the genesis validators ## 0.22.0 @@ -205,8 +450,8 @@ IMPROVEMENTS * [store] Pruning strategy configurable with pruning flag on gaiad start BUG FIXES -* \#1630 - redelegation nolonger removes tokens from the delegator liquid account -* [keys] \#1629 - updating password no longer asks for a new password when the first entered password was incorrect +* [\#1630](https://github.com/cosmos/cosmos-sdk/issues/1630) - redelegation nolonger removes tokens from the delegator liquid account +* [keys] [\#1629](https://github.com/cosmos/cosmos-sdk/issues/1629) - updating password no longer asks for a new password when the first entered password was incorrect * [lcd] importing an account would create a random account * [server] 'gaiad init' command family now writes provided name as the moniker in `config.toml` * [build] Added Ledger build support via `LEDGER_ENABLED=true|false` @@ -322,9 +567,9 @@ IMPROVEMENTS * [docs] Added commands for governance CLI on testnet README BUG FIXES -* [x/slashing] \#1510 Unrevoked validators cannot un-revoke themselves -* [x/stake] \#1513 Validators slashed to zero power are unbonded and removed from the store -* [x/stake] \#1567 Validators decreased in power but not unbonded are now updated in Tendermint +* [x/slashing] [\#1510](https://github.com/cosmos/cosmos-sdk/issues/1510) Unrevoked validators cannot un-revoke themselves +* [x/stake] [\#1513](https://github.com/cosmos/cosmos-sdk/issues/1513) Validators slashed to zero power are unbonded and removed from the store +* [x/stake] [\#1567](https://github.com/cosmos/cosmos-sdk/issues/1567) Validators decreased in power but not unbonded are now updated in Tendermint * [x/stake] error strings lower case * [x/stake] pool loose tokens now accounts for unbonding and unbonding tokens not associated with any validator * [x/stake] fix revoke bytes ordering (was putting revoked candidates at the top of the list) @@ -334,20 +579,20 @@ BUG FIXES * Retry on HTTP request failure in CLI tests, add option to retry tests in Makefile * Fixed bug where chain ID wasn't passed properly in x/bank REST handler, removed Viper hack from ante handler * Fixed bug where `democli account` didn't decode the account data correctly -* \#872 - recovery phrases no longer all end in `abandon` -* \#887 - limit the size of rationals that can be passed in from user input -* \#1052 - Make all now works -* \#1258 - printing big.rat's can no longer overflow int64 -* \#1259 - fix bug where certain tests that could have a nil pointer in defer -* \#1343 - fixed unnecessary parallelism in CI -* \#1353 - CLI: Show pool shares fractions in human-readable format -* \#1367 - set ChainID in InitChain -* \#1461 - CLI tests now no longer reset your local environment data -* \#1505 - `gaiacli stake validator` no longer panics if validator doesn't exist -* \#1565 - fix cliff validator persisting when validator set shrinks from max -* \#1287 - prevent zero power validators at genesis +* [\#872](https://github.com/cosmos/cosmos-sdk/issues/872) - recovery phrases no longer all end in `abandon` +* [\#887](https://github.com/cosmos/cosmos-sdk/issues/887) - limit the size of 
rationals that can be passed in from user input +* [\#1052](https://github.com/cosmos/cosmos-sdk/issues/1052) - Make all now works +* [\#1258](https://github.com/cosmos/cosmos-sdk/issues/1258) - printing big.rat's can no longer overflow int64 +* [\#1259](https://github.com/cosmos/cosmos-sdk/issues/1259) - fix bug where certain tests that could have a nil pointer in defer +* [\#1343](https://github.com/cosmos/cosmos-sdk/issues/1343) - fixed unnecessary parallelism in CI +* [\#1353](https://github.com/cosmos/cosmos-sdk/issues/1353) - CLI: Show pool shares fractions in human-readable format +* [\#1367](https://github.com/cosmos/cosmos-sdk/issues/1367) - set ChainID in InitChain +* [\#1461](https://github.com/cosmos/cosmos-sdk/issues/1461) - CLI tests now no longer reset your local environment data +* [\#1505](https://github.com/cosmos/cosmos-sdk/issues/1505) - `gaiacli stake validator` no longer panics if validator doesn't exist +* [\#1565](https://github.com/cosmos/cosmos-sdk/issues/1565) - fix cliff validator persisting when validator set shrinks from max +* [\#1287](https://github.com/cosmos/cosmos-sdk/issues/1287) - prevent zero power validators at genesis * [x/stake] fix bug when unbonding/redelegating using `--shares-percent` -* \#1010 - two validators can't bond with the same pubkey anymore +* [\#1010](https://github.com/cosmos/cosmos-sdk/issues/1010) - two validators can't bond with the same pubkey anymore ## 0.19.0 diff --git a/CODEOWNERS b/CODEOWNERS deleted file mode 100644 index 2992bcf66..000000000 --- a/CODEOWNERS +++ /dev/null @@ -1,2 +0,0 @@ -* @jaekwon -* @ebuchman diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7b7ad81fd..3a7dca210 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -133,6 +133,17 @@ Libraries need not follow the model strictly, but would be wise to. The SDK utilizes [semantic versioning](https://semver.org/). +### PR Targeting + +Ensure that you base and target your PR on the correct branch: + - `release/vxx.yy.zz` for a merge into a release candidate + - `master` for a merge of a release + - `develop` in the usual case + +All feature additions should be targeted against `develop`. Bug fixes for an outstanding release candidate +should be targeted against the release candidate branch. Release candidate branches themselves should be the +only pull requests targeted directly against master. + ### Development Procedure: - the latest state of development is on `develop` - `develop` must never fail `make test` or `make test_cli` diff --git a/Gopkg.lock b/Gopkg.lock index 5f988fd7a..f6e6fd658 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -1,6 +1,14 @@ # This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+[[projects]] + branch = "master" + digest = "1:7736fc6da04620727f8f3aa2ced8d77be8e074a302820937aa5993848c769b27" + name = "github.com/ZondaX/hid-go" + packages = ["."] + pruneopts = "UT" + revision = "48b08affede2cea076a3cf13b2e3f72ed262b743" + [[projects]] digest = "1:09a7f74eb6bb3c0f14d8926610c87f569c5cff68e978d30e9a3540aeb626fdf0" name = "github.com/bartekn/go-bip39" @@ -26,19 +34,11 @@ [[projects]] branch = "master" - digest = "1:70f6b224a59b2fa453debffa85c77f71063d8754b90c8c4fbad5794e2c382b0f" - name = "github.com/brejski/hid" - packages = ["."] - pruneopts = "UT" - revision = "06112dcfcc50a7e0e4fd06e17f9791e788fdaafc" - -[[projects]] - branch = "master" - digest = "1:2c00f064ba355903866cbfbf3f7f4c0fe64af6638cc7d1b8bdcf3181bc67f1d8" + digest = "1:c0decf632843204d2b8781de7b26e7038584e2dcccc7e2f401e88ae85b1df2b7" name = "github.com/btcsuite/btcd" packages = ["btcec"] pruneopts = "UT" - revision = "f899737d7f2764dc13e4d01ff00108ec58f766a9" + revision = "2a560b2036bee5e3679ec2133eb6520b2f195213" [[projects]] digest = "1:386de157f7d19259a7f9c81f26ce011223ce0f090353c1152ffdf730d7d10ac2" @@ -47,6 +47,13 @@ pruneopts = "UT" revision = "d4cc87b860166d00d6b5b9e0d3b3d71d6088d4d4" +[[projects]] + digest = "1:e8a3550c8786316675ff54ad6f09d265d129c9d986919af7f541afba50d87ce2" + name = "github.com/cosmos/go-bip39" + packages = ["."] + pruneopts = "UT" + revision = "52158e4697b87de16ed390e1bdaf813e581008fa" + [[projects]] digest = "1:a2c1d0e43bd3baaa071d1b9ed72c27d78169b2b269f71c105ac4ba34b1be4a39" name = "github.com/davecgh/go-spew" @@ -55,13 +62,6 @@ revision = "346938d642f2ec3594ed81d874461961cd0faa76" version = "v1.1.0" -[[projects]] - digest = "1:c7644c73a3d23741fdba8a99b1464e021a224b7e205be497271a8003a15ca41b" - name = "github.com/ebuchman/fail-test" - packages = ["."] - pruneopts = "UT" - revision = "95f809107225be108efcf10a3509e4ea6ceef3c4" - [[projects]] digest = "1:abeb38ade3f32a92943e5be54f55ed6d6e3b6602761d74b4aab4c9dd45c18abd" name = "github.com/fsnotify/fsnotify" @@ -95,12 +95,12 @@ version = "v0.3.0" [[projects]] - digest = "1:c4a2528ccbcabf90f9f3c464a5fc9e302d592861bbfd0b7135a7de8a943d0406" + digest = "1:586ea76dbd0374d6fb649a91d70d652b7fe0ccffb8910a77468e7702e7901f3d" name = "github.com/go-stack/stack" packages = ["."] pruneopts = "UT" - revision = "259ab82a6cad3992b4e21ff5cac294ccb06474bc" - version = "v1.7.0" + revision = "2fee6af1a9795aafbe0253a0cfbdf668e1fb8a9a" + version = "v1.8.0" [[projects]] digest = "1:35621fe20f140f05a0c4ef662c26c0ab4ee50bca78aa30fe87d33120bd28165e" @@ -164,8 +164,18 @@ version = "v1.2.0" [[projects]] - branch = "master" - digest = "1:12247a2e99a060cc692f6680e5272c8adf0b8f572e6bce0d7095e624c958a240" + digest = "1:8ec8d88c248041a6df5f6574b87bc00e7e0b493881dad2e7ef47b11dc69093b5" + name = "github.com/hashicorp/golang-lru" + packages = [ + ".", + "simplelru", + ] + pruneopts = "UT" + revision = "20f1fb78b0740ba8c3cb143a61e86ba5c8669768" + version = "v0.5.0" + +[[projects]] + digest = "1:ea40c24cdbacd054a6ae9de03e62c5f252479b96c716375aace5c120d68647c8" name = "github.com/hashicorp/hcl" packages = [ ".", @@ -179,7 +189,8 @@ "json/token", ] pruneopts = "UT" - revision = "ef8a98b0bbce4a65b5aa4c368430a80ddc533168" + revision = "8cb6e5b959231cc1119e43259c4a608f9c51a241" + version = "v1.0.0" [[projects]] digest = "1:870d441fe217b8e689d7949fef6e43efbc787e50f200cb1e70dbca9204a1d6be" @@ -214,12 +225,12 @@ version = "v1.8.0" [[projects]] - digest = "1:d4d17353dbd05cb52a2a52b7fe1771883b682806f68db442b436294926bbfafb" + digest = 
"1:0981502f9816113c9c8c4ac301583841855c8cf4da8c72f696b3ebedf6d0e4e5" name = "github.com/mattn/go-isatty" packages = ["."] pruneopts = "UT" - revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39" - version = "v0.0.3" + revision = "6ca4dbf54d38eea1a992b3c722a76a5d1c4cb25c" + version = "v0.0.4" [[projects]] digest = "1:ff5ebae34cfbf047d505ee150de27e60570e8c394b3b8fdbb720ff6ac71985fc" @@ -230,12 +241,20 @@ version = "v1.0.1" [[projects]] - branch = "master" - digest = "1:5ab79470a1d0fb19b041a624415612f8236b3c06070161a910562f2b2d064355" + digest = "1:78bbb1ba5b7c3f2ed0ea1eab57bdd3859aec7e177811563edc41198a760b06af" + name = "github.com/mitchellh/go-homedir" + packages = ["."] + pruneopts = "UT" + revision = "ae18d6b8b3205b561c79e8e5f69bff09736185f4" + version = "v1.0.0" + +[[projects]] + digest = "1:e32dfc6abff6a3633ef4d9a1022fd707c8ef26f1e1e8f855dc58dc415ce7c8f3" name = "github.com/mitchellh/mapstructure" packages = ["."] pruneopts = "UT" - revision = "f15292f7a699fcc1a38a80977f80a046874ba8ac" + revision = "fe40af7a9c397fa3ddba203c38a5042c5d0475ad" + version = "v1.1.1" [[projects]] digest = "1:95741de3af260a92cc5c7f3f3061e85273f5a81b5db20d4bd68da74bd521675e" @@ -262,14 +281,16 @@ version = "v1.0.0" [[projects]] - digest = "1:c1a04665f9613e082e1209cf288bf64f4068dcd6c87a64bf1c4ff006ad422ba0" + digest = "1:93a746f1060a8acbcf69344862b2ceced80f854170e1caae089b2834c5fbf7f4" name = "github.com/prometheus/client_golang" packages = [ "prometheus", + "prometheus/internal", "prometheus/promhttp", ] pruneopts = "UT" - revision = "ae27198cdd90bf12cd134ad79d1366a6cf49f632" + revision = "505eaef017263e299324067d40ca2c48f6a2cf50" + version = "v0.9.2" [[projects]] branch = "master" @@ -293,7 +314,7 @@ [[projects]] branch = "master" - digest = "1:8c49953a1414305f2ff5465147ee576dd705487c35b15918fcd4efdc0cb7a290" + digest = "1:ef1dd9945e58ee9b635273d28c0ef3fa3742a7dedc038ebe207fd63e6ce000ef" name = "github.com/prometheus/procfs" packages = [ ".", @@ -302,7 +323,15 @@ "xfs", ] pruneopts = "UT" - revision = "05ee40e3a273f7245e8777337fc7b46e533a9a92" + revision = "418d78d0b9a7b7de3a6bbc8a23def624cc977bb2" + +[[projects]] + digest = "1:ea0700160aca4ef099f4e06686a665a87691f4248dddd40796925eda2e46bd64" + name = "github.com/rakyll/statik" + packages = ["fs"] + pruneopts = "UT" + revision = "aa8a7b1baecd0f31a436bf7956fcdcc609a83035" + version = "v0.1.4" [[projects]] digest = "1:c4556a44e350b50a490544d9b06e9fba9c286c21d6c0e47f54f3a9214597298c" @@ -312,15 +341,23 @@ revision = "e2704e165165ec55d062f5919b4b29494e9fa790" [[projects]] - digest = "1:bd1ae00087d17c5a748660b8e89e1043e1e5479d0fea743352cda2f8dd8c4f84" + digest = "1:b0c25f00bad20d783d259af2af8666969e2fc343fa0dc9efe52936bbd67fb758" + name = "github.com/rs/cors" + packages = ["."] + pruneopts = "UT" + revision = "9a47f48565a795472d43519dd49aac781f3034fb" + version = "v1.6.0" + +[[projects]] + digest = "1:6a4a11ba764a56d2758899ec6f3848d24698d48442ebce85ee7a3f63284526cd" name = "github.com/spf13/afero" packages = [ ".", "mem", ] pruneopts = "UT" - revision = "787d034dfe70e44075ccc060d346146ef53270ad" - version = "v1.1.1" + revision = "d40851caa0d747393da1ffb28f7f9d8b4eeffebd" + version = "v1.1.2" [[projects]] digest = "1:516e71bed754268937f57d4ecb190e01958452336fa73dbac880894164e91c1f" @@ -339,12 +376,12 @@ version = "v0.0.1" [[projects]] - branch = "master" - digest = "1:8a020f916b23ff574845789daee6818daf8d25a4852419aae3f0b12378ba432a" + digest = "1:68ea4e23713989dc20b1bded5d9da2c5f9be14ff9885beef481848edd18c26cb" name = "github.com/spf13/jwalterweatherman" packages = 
["."] pruneopts = "UT" - revision = "14d3d4c518341bea657dd8a226f5121c0ff8c9f2" + revision = "4a4406e478ca629068e7768fc33f3f044173c0a6" + version = "v1.0.0" [[projects]] digest = "1:dab83a1bbc7ad3d7a6ba1a1cc1760f25ac38cdf7d96a5cdd55cd915a4f5ceaf9" @@ -374,8 +411,7 @@ version = "v1.2.1" [[projects]] - branch = "master" - digest = "1:f2ffd421680b0a3f7887501b3c6974bcf19217ecd301d0e2c9b681940ec363d5" + digest = "1:b3cfb8d82b1601a846417c3f31c03a7961862cb2c98dcf0959c473843e6d9a2b" name = "github.com/syndtr/goleveldb" packages = [ "leveldb", @@ -392,42 +428,40 @@ "leveldb/util", ] pruneopts = "UT" - revision = "ae2bd5eed72d46b28834ec3f60db3a3ebedd8dbd" + revision = "c4c61651e9e37fa117f53c5a906d3b63090d8445" [[projects]] - branch = "master" - digest = "1:087aaa7920e5d0bf79586feb57ce01c35c830396ab4392798112e8aae8c47722" - name = "github.com/tendermint/ed25519" - packages = [ - ".", - "edwards25519", - "extra25519", - ] + digest = "1:83f5e189eea2baad419a6a410984514266ff690075759c87e9ede596809bd0b8" + name = "github.com/tendermint/btcd" + packages = ["btcec"] pruneopts = "UT" - revision = "d8387025d2b9d158cf4efb07e7ebf814bcce2057" + revision = "80daadac05d1cd29571fccf27002d79667a88b58" + version = "v0.1.1" [[projects]] - digest = "1:e0a2a4be1e20c305badc2b0a7a9ab7fef6da500763bec23ab81df3b5f9eec9ee" + digest = "1:ad9c4c1a4e7875330b1f62906f2830f043a23edb5db997e3a5ac5d3e6eadf80a" name = "github.com/tendermint/go-amino" packages = ["."] pruneopts = "UT" - revision = "a8328986c1608950fa5d3d1c0472cccc4f8fc02c" - version = "v0.12.0-rc0" + revision = "dc14acf9ef15f85828bfbc561ed9dd9d2a284885" + version = "v0.14.1" [[projects]] - digest = "1:d4a15d404afbf591e8be16fcda7f5ac87948d5c7531f9d909fd84cc730ab16e2" + digest = "1:e1cc8dd891e64aab63b0c09f2f12456cbe2cd9cbd9d96dfae3281245f05c2428" name = "github.com/tendermint/iavl" packages = ["."] pruneopts = "UT" - revision = "35f66e53d9b01e83b30de68b931f54b2477a94c9" - version = "v0.9.2" + revision = "de0740903a67b624d887f9055d4c60175dcfa758" + version = "v0.12.0" [[projects]] - digest = "1:4f15e95fe3888cc75dd34f407d6394cbc7fd3ff24920851b92b295f6a8b556e6" + branch = "upgrade281" + digest = "1:171062a60aed2126606047d1eb1db7bc3211f8558429c4933826a7b563f153bf" name = "github.com/tendermint/tendermint" packages = [ "abci/client", "abci/example/code", + "abci/example/counter", "abci/example/kvstore", "abci/server", "abci/types", @@ -441,6 +475,8 @@ "crypto/ed25519", "crypto/encoding/amino", "crypto/merkle", + "crypto/multisig", + "crypto/multisig/bitarray", "crypto/secp256k1", "crypto/tmhash", "crypto/xsalsa20symmetric", @@ -453,6 +489,7 @@ "libs/common", "libs/db", "libs/events", + "libs/fail", "libs/flowrate", "libs/log", "libs/pubsub", @@ -460,7 +497,6 @@ "lite", "lite/client", "lite/errors", - "lite/files", "lite/proxy", "mempool", "node", @@ -474,7 +510,6 @@ "rpc/core", "rpc/core/types", "rpc/grpc", - "rpc/lib", "rpc/lib/client", "rpc/lib/server", "rpc/lib/types", @@ -483,27 +518,31 @@ "state/txindex/kv", "state/txindex/null", "types", + "types/time", "version", ] pruneopts = "UT" - revision = "81df19e68ab1519399fccf0cab81cb75bf9d782e" - version = "v0.23.1-rc0" + revision = "e92d05af5852aacc17c0c3ae32c443753e032fd4" + source = "github.com/BiJie/bnc-tendermint" [[projects]] - digest = "1:4dcb0dd65feecb068ce23a234d1a07c7868a1e39f52a6defcae0bb371d03abf6" + digest = "1:7886f86064faff6f8d08a3eb0e8c773648ff5a2e27730831e2bfbf07467f6666" name = "github.com/zondax/ledger-goclient" packages = ["."] pruneopts = "UT" - revision = "4296ee5701e945f9b3a7dbe51f402e0b9be57259" + 
revision = "58598458c11bc0ad1c1b8dac3dc3e11eaf270b79" + version = "v0.1.0" [[projects]] - branch = "master" - digest = "1:7a71fffde456d746c52f9cd09c50b034533a3180fb1f6320abb149f2ccc579e5" + digest = "1:6f6dc6060c4e9ba73cf28aa88f12a69a030d3d19d518ef8e931879eaa099628d" name = "golang.org/x/crypto" packages = [ + "bcrypt", "blowfish", "chacha20poly1305", "curve25519", + "ed25519", + "ed25519/internal/edwards25519", "hkdf", "internal/chacha20", "internal/subtle", @@ -517,7 +556,8 @@ "salsa20/salsa", ] pruneopts = "UT" - revision = "de0752318171da717af4ce24d0a2e8626afaeb11" + revision = "3764759f34a542a3aef74d6b02e35be7ab893bba" + source = "https://github.com/tendermint/crypto" [[projects]] digest = "1:d36f55a999540d29b6ea3c2ea29d71c76b1d9853fdcd3e5c5cb4836f2ba118f1" @@ -536,15 +576,14 @@ revision = "292b43bbf7cb8d35ddf40f8d5100ef3837cced3f" [[projects]] - branch = "master" - digest = "1:a989b95f72fce8876213e8e20492525b4cf69a9e7fee7f1d9897983ee0d547e9" + digest = "1:4bd75b1a219bc590b05c976bbebf47f4e993314ebb5c7cbf2efe05a09a184d54" name = "golang.org/x/sys" packages = [ "cpu", "unix", ] pruneopts = "UT" - revision = "1c9583448a9c3aa0f9a6a5241bf73c0bd8aafded" + revision = "4e1fef5609515ec7a2cee7b5de30ba6d9b438cbf" [[projects]] digest = "1:a2ab62866c75542dd18d2b069fec854577a20211d7c0ea6ae746072a1dccdd18" @@ -570,12 +609,11 @@ version = "v0.3.0" [[projects]] - branch = "master" digest = "1:077c1c599507b3b3e9156d17d36e1e61928ee9b53a5b420f10f28ebd4a0b275c" name = "google.golang.org/genproto" packages = ["googleapis/rpc/status"] pruneopts = "UT" - revision = "d0a8f471bba2dbb160885b0000d814ee5d559bad" + revision = "383e8b2c3b9e36c4076b235b32537292176bae20" [[projects]] digest = "1:2dab32a43451e320e49608ff4542fdfc653c95dcc35d0065ec9c6c3dd540ed74" @@ -626,19 +664,27 @@ "github.com/bartekn/go-bip39", "github.com/bgentry/speakeasy", "github.com/btcsuite/btcd/btcec", + "github.com/cosmos/go-bip39", "github.com/golang/protobuf/proto", "github.com/gorilla/mux", + "github.com/hashicorp/golang-lru", "github.com/mattn/go-isatty", + "github.com/mitchellh/go-homedir", + "github.com/pelletier/go-toml", "github.com/pkg/errors", + "github.com/rakyll/statik/fs", "github.com/spf13/cobra", "github.com/spf13/pflag", "github.com/spf13/viper", "github.com/stretchr/testify/assert", "github.com/stretchr/testify/require", + "github.com/syndtr/goleveldb/leveldb/opt", "github.com/tendermint/go-amino", "github.com/tendermint/iavl", + "github.com/tendermint/tendermint/abci/client", "github.com/tendermint/tendermint/abci/server", "github.com/tendermint/tendermint/abci/types", + "github.com/tendermint/tendermint/blockchain", "github.com/tendermint/tendermint/cmd/tendermint/commands", "github.com/tendermint/tendermint/config", "github.com/tendermint/tendermint/crypto", @@ -646,6 +692,7 @@ "github.com/tendermint/tendermint/crypto/ed25519", "github.com/tendermint/tendermint/crypto/encoding/amino", "github.com/tendermint/tendermint/crypto/merkle", + "github.com/tendermint/tendermint/crypto/multisig", "github.com/tendermint/tendermint/crypto/secp256k1", "github.com/tendermint/tendermint/crypto/tmhash", "github.com/tendermint/tendermint/crypto/xsalsa20symmetric", @@ -655,6 +702,9 @@ "github.com/tendermint/tendermint/libs/common", "github.com/tendermint/tendermint/libs/db", "github.com/tendermint/tendermint/libs/log", + "github.com/tendermint/tendermint/lite", + "github.com/tendermint/tendermint/lite/errors", + "github.com/tendermint/tendermint/lite/proxy", "github.com/tendermint/tendermint/node", "github.com/tendermint/tendermint/p2p", 
"github.com/tendermint/tendermint/privval", @@ -666,8 +716,7 @@ "github.com/tendermint/tendermint/types", "github.com/tendermint/tendermint/version", "github.com/zondax/ledger-goclient", - "golang.org/x/crypto/blowfish", - "golang.org/x/crypto/ripemd160", + "golang.org/x/crypto/bcrypt", ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 4368699b6..3b02265a2 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -49,24 +49,67 @@ [[override]] name = "github.com/tendermint/go-amino" - version = "=v0.12.0-rc0" + version = "=v0.14.1" [[override]] name = "github.com/tendermint/iavl" - version = "=v0.9.2" + version = "=v0.12.0" [[override]] name = "github.com/tendermint/tendermint" - version = "=v0.23.1-rc0" + source = "github.com/BiJie/bnc-tendermint" + version = "=v0.29.1-br0" + +## deps without releases: + +[[override]] + name = "golang.org/x/crypto" + source = "https://github.com/tendermint/crypto" + revision = "3764759f34a542a3aef74d6b02e35be7ab893bba" [[constraint]] - name = "github.com/bartekn/go-bip39" - revision = "a05967ea095d81c8fe4833776774cfaff8e5036c" + name = "github.com/cosmos/go-bip39" + revision = "52158e4697b87de16ed390e1bdaf813e581008fa" [[constraint]] name = "github.com/zondax/ledger-goclient" - revision = "4296ee5701e945f9b3a7dbe51f402e0b9be57259" + version = "=v0.1.0" + +## transitive deps, with releases: + +[[override]] + name = "github.com/davecgh/go-spew" + version = "=v1.1.0" + +[[constraint]] + name = "github.com/rakyll/statik" + version = "=v0.1.4" + +[[constraint]] + name = "github.com/mitchellh/go-homedir" + version = "1.0.0" + +[[constraint]] + name = "github.com/hashicorp/golang-lru" + version = "0.5.0" + +## transitive deps, without releases: +# + +[[override]] + name = "github.com/syndtr/goleveldb" + revision = "c4c61651e9e37fa117f53c5a906d3b63090d8445" + +[[override]] + name = "golang.org/x/sys" + revision = "4e1fef5609515ec7a2cee7b5de30ba6d9b438cbf" + +[[override]] + name = "google.golang.org/genproto" + revision = "383e8b2c3b9e36c4076b235b32537292176bae20" [prune] go-tests = true unused-packages = true + + diff --git a/Makefile b/Makefile index 5a8dd82fe..ec02f1402 100644 --- a/Makefile +++ b/Makefile @@ -1,10 +1,15 @@ PACKAGES_NOSIMULATION=$(shell go list ./... | grep -v '/simulation') PACKAGES_SIMTEST=$(shell go list ./... 
| grep '/simulation') -COMMIT_HASH := $(shell git rev-parse --short HEAD) +VERSION := $(shell git describe --tags --long | sed 's/v\(.*\)/\1/') BUILD_TAGS = netgo ledger -BUILD_FLAGS = -tags "${BUILD_TAGS}" -ldflags "-X github.com/cosmos/cosmos-sdk/version.GitCommit=${COMMIT_HASH}" +BUILD_FLAGS = -tags "${BUILD_TAGS}" -ldflags "-X github.com/cosmos/cosmos-sdk/version.Version=${VERSION}" GCC := $(shell command -v gcc 2> /dev/null) LEDGER_ENABLED ?= true +UNAME_S := $(shell uname -s) +GOTOOLS = \ + github.com/golang/dep/cmd/dep \ + github.com/alecthomas/gometalinter \ + github.com/rakyll/statik all: get_tools get_vendor_deps install install_examples install_cosmos-sdk-cli test_lint test ######################################## @@ -17,15 +22,21 @@ ci: get_tools get_vendor_deps install test_cover test_lint test check-ledger: ifeq ($(LEDGER_ENABLED),true) -ifndef GCC -$(error "gcc not installed for ledger support, please install") -endif + ifeq ($(UNAME_S),OpenBSD) + $(info "OpenBSD detected, disabling ledger support (https://github.com/cosmos/cosmos-sdk/issues/1988)") +TMP_BUILD_TAGS := $(BUILD_TAGS) +BUILD_TAGS = $(filter-out ledger, $(TMP_BUILD_TAGS)) + else + ifndef GCC + $(error "gcc not installed for ledger support, please install or set LEDGER_ENABLED to false in the Makefile") + endif + endif else TMP_BUILD_TAGS := $(BUILD_TAGS) BUILD_TAGS = $(filter-out ledger, $(TMP_BUILD_TAGS)) endif -build: check-ledger +build: check-ledger update_gaia_lite_docs ifeq ($(OS),Windows_NT) go build $(BUILD_FLAGS) -o build/gaiad.exe ./cmd/gaia/cmd/gaiad go build $(BUILD_FLAGS) -o build/gaiacli.exe ./cmd/gaia/cmd/gaiacli @@ -37,6 +48,9 @@ endif build-linux: LEDGER_ENABLED=false GOOS=linux GOARCH=amd64 $(MAKE) build +update_gaia_lite_docs: + @statik -src=client/lcd/swagger-ui -dest=client/lcd -f + build_cosmos-sdk-cli: ifeq ($(OS),Windows_NT) go build $(BUILD_FLAGS) -o build/cosmos-sdk-cli.exe ./cmd/cosmos-sdk-cli @@ -57,7 +71,7 @@ else go build $(BUILD_FLAGS) -o build/democli ./examples/democoin/cmd/democli endif -install: check-ledger +install: check-ledger update_gaia_lite_docs go install $(BUILD_FLAGS) ./cmd/gaia/cmd/gaiad go install $(BUILD_FLAGS) ./cmd/gaia/cmd/gaiacli @@ -81,25 +95,36 @@ dist: ### Tools & dependencies check_tools: - cd tools && $(MAKE) check_tools - -check_dev_tools: - cd tools && $(MAKE) check_dev_tools + @# https://stackoverflow.com/a/25668869 + @echo "Found tools: $(foreach tool,$(notdir $(GOTOOLS)),\ + $(if $(shell which $(tool)),$(tool),$(error "No $(tool) in PATH")))" update_tools: - cd tools && $(MAKE) update_tools + @echo "--> Updating tools to correct version" + ./scripts/get_tools.sh update_dev_tools: - cd tools && $(MAKE) update_dev_tools + @echo "--> Downloading linters (this may take awhile)" + $(GOPATH)/src/github.com/alecthomas/gometalinter/scripts/install.sh -b $(GOBIN) + go get -u github.com/tendermint/lint/golint get_tools: - cd tools && $(MAKE) get_tools + @echo "--> Installing tools" + ./scripts/get_tools.sh get_dev_tools: - cd tools && $(MAKE) get_dev_tools + @echo "--> Downloading linters (this may take awhile)" + $(GOPATH)/src/github.com/alecthomas/gometalinter/scripts/install.sh -b $(GOBIN) + go get github.com/tendermint/lint/golint get_vendor_deps: + @echo "--> Generating vendor directory via dep ensure" + @rm -rf .vendor-new + @dep ensure -v -vendor-only + +update_vendor_deps: @echo "--> Running dep ensure" + @rm -rf .vendor-new @dep ensure -v draw_deps: @@ -124,11 +149,15 @@ test: test_unit test_cli: @go test -count 1 -p 1 `go list 
github.com/cosmos/cosmos-sdk/cmd/gaia/cli_test` -tags=cli_test +test_examples: + @go test -count 1 -p 1 `go list github.com/cosmos/cosmos-sdk/examples/basecoin/cli_test` -tags=cli_test + @go test -count 1 -p 1 `go list github.com/cosmos/cosmos-sdk/examples/democoin/cli_test` -tags=cli_test + test_unit: - @go test $(PACKAGES_NOSIMULATION) + @VERSION=$(VERSION) go test $(PACKAGES_NOSIMULATION) test_race: - @go test -race $(PACKAGES_NOSIMULATION) + @VERSION=$(VERSION) go test -race $(PACKAGES_NOSIMULATION) test_sim_modules: @echo "Running individual module simulations..." @@ -140,25 +169,36 @@ test_sim_gaia_nondeterminism: test_sim_gaia_fast: @echo "Running quick Gaia simulation. This may take several minutes..." - @go test ./cmd/gaia/app -run TestFullGaiaSimulation -SimulationEnabled=true -SimulationNumBlocks=200 -timeout 24h + @go test ./cmd/gaia/app -run TestFullGaiaSimulation -SimulationEnabled=true -SimulationNumBlocks=400 -SimulationBlockSize=200 -SimulationCommit=true -v -timeout 24h + +test_sim_gaia_multi_seed: + @echo "Running multi-seed Gaia simulation. This may take awhile!" + @bash scripts/multisim.sh 10 + +SIM_NUM_BLOCKS ?= 210 +SIM_BLOCK_SIZE ?= 200 +SIM_COMMIT ?= true +test_sim_gaia_benchmark: + @echo "Running Gaia benchmark for numBlocks=$(SIM_NUM_BLOCKS), blockSize=$(SIM_BLOCK_SIZE). This may take awhile!" + @go test -benchmem -run=^$$ github.com/cosmos/cosmos-sdk/cmd/gaia/app -bench ^BenchmarkFullGaiaSimulation$$ -SimulationEnabled=true -SimulationNumBlocks=$(SIM_NUM_BLOCKS) -SimulationBlockSize=$(SIM_BLOCK_SIZE) -SimulationCommit=$(SIM_COMMIT) -timeout 24h -test_sim_gaia_slow: - @echo "Running full Gaia simulation. This may take awhile!" - @go test ./cmd/gaia/app -run TestFullGaiaSimulation -SimulationEnabled=true -SimulationNumBlocks=1000 -SimulationVerbose=true -v -timeout 24h +test_sim_gaia_profile: + @echo "Running Gaia benchmark for numBlocks=$(SIM_NUM_BLOCKS), blockSize=$(SIM_BLOCK_SIZE). This may take awhile!" + @go test -benchmem -run=^$$ github.com/cosmos/cosmos-sdk/cmd/gaia/app -bench ^BenchmarkFullGaiaSimulation$$ -SimulationEnabled=true -SimulationNumBlocks=$(SIM_NUM_BLOCKS) -SimulationBlockSize=$(SIM_BLOCK_SIZE) -SimulationCommit=$(SIM_COMMIT) -timeout 24h -cpuprofile cpu.out -memprofile mem.out test_cover: - @bash tests/test_cover.sh + @export VERSION=$(VERSION); bash tests/test_cover.sh test_lint: - gometalinter.v2 --config=tools/gometalinter.json ./... - !(gometalinter.v2 --disable-all --enable='errcheck' --vendor ./... | grep -v "client/") + gometalinter --config=tools/gometalinter.json ./... + !(gometalinter --exclude /usr/lib/go/src/ --exclude client/lcd/statik/statik.go --exclude 'vendor/*' --disable-all --enable='errcheck' --vendor ./... | grep -v "client/") find . -name '*.go' -type f -not -path "./vendor*" -not -path "*.git*" | xargs gofmt -d -s dep status >> /dev/null !(grep -n branch Gopkg.toml) format: - find . -name '*.go' -type f -not -path "./vendor*" -not -path "*.git*" | xargs gofmt -w -s - find . -name '*.go' -type f -not -path "./vendor*" -not -path "*.git*" | xargs misspell -w + find . -name '*.go' -type f -not -path "./vendor*" -not -path "*.git*" -not -path "./client/lcd/statik/statik.go" | xargs gofmt -w -s + find . -name '*.go' -type f -not -path "./vendor*" -not -path "*.git*" -not -path "./client/lcd/statik/statik.go" | xargs misspell -w benchmark: @go test -bench=. $(PACKAGES_NOSIMULATION) @@ -196,7 +236,7 @@ build-docker-gaiadnode: # Run a 4-node testnet locally localnet-start: localnet-stop - @if ! 
[ -f build/node0/gaiad/config/genesis.json ]; then docker run --rm -v $(CURDIR)/build:/gaiad:Z tendermint/gaiadnode testnet --v 4 --o . --starting-ip-address 192.168.10.2 ; fi + @if ! [ -f build/node0/gaiad/config/genesis.json ]; then docker run --rm -v $(CURDIR)/build:/gaiad:Z tendermint/gaiadnode testnet --v 4 -o . --starting-ip-address 192.168.10.2 ; fi docker-compose up -d # Stop testnet @@ -210,4 +250,4 @@ localnet-stop: check_tools check_dev_tools get_tools get_dev_tools get_vendor_deps draw_deps test test_cli test_unit \ test_cover test_lint benchmark devdoc_init devdoc devdoc_save devdoc_update \ build-linux build-docker-gaiadnode localnet-start localnet-stop \ -format check-ledger test_sim_modules test_sim_gaia_fast test_sim_gaia_slow update_tools update_dev_tools +format check-ledger test_sim_gaia_nondeterminism test_sim_modules test_sim_gaia_fast test_sim_gaia_multi_seed update_tools update_dev_tools diff --git a/PENDING.md b/PENDING.md index 9ade31c8b..4d87301f0 100644 --- a/PENDING.md +++ b/PENDING.md @@ -7,7 +7,6 @@ BREAKING CHANGES * Gaia CLI (`gaiacli`) * Gaia - * Make the transient store key use a distinct store key. [#2013](https://github.com/cosmos/cosmos-sdk/pull/2013) * SDK diff --git a/README.md b/README.md index 75761f5f4..067879d28 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,24 @@ +# Why we create this repo + +This repo is forked from [https://github.com/cosmos/cosmos-sdk](https://github.com/cosmos/cosmos-sdk). + +Our BinanceChain app leverages cosmos-sdk to fast build a dApp running with tendermint. As our app becomes more and more complex, the original cosmos-sdk can hardly fit all our requirements. +We changed a lot to our copied sdk, but it makes the future integration harder and harder. So we decided to fork cosmos-sdk. + +# How to use this repo + +We need to remove the original cosmos-sdk repo and clone our repo into that directory. +The reason is that we need to keep the import path. + +```bash +> cd $GOPATH/src/github.com +> rm -rf cosmos/cosmos-sdk +> git clone https://github.com/BiJie/bnc-cosmos-sdk.git cosmos/cosmos-sdk +> cd cosmos-sdk +> git checkout develop +> make get_vendor_deps +``` + # Cosmos SDK ![banner](docs/graphics/cosmos-sdk-image.png) @@ -21,7 +42,7 @@ breaking changes. ## Gaia Testnet -To join the latest testnet, follow +To join the latest testnet, follow [the guide](https://cosmos.network/docs/getting-started/full-node.html#setting-up-a-new-node). For status updates and genesis files, see the @@ -29,7 +50,7 @@ For status updates and genesis files, see the ## Install -See the +See the [install instructions](https://cosmos.network/docs/getting-started/installation.html). ## Quick Start @@ -42,4 +63,4 @@ See the [Cosmos Docs](https://cosmos.network/docs/) ## Disambiguation -This Cosmos-SDK project is not related to the [React-Cosmos](https://github.com/react-cosmos/react-cosmos) project (yet). Many thanks to Evan Coury and Ovidiu (@skidding) for this Github organization name. As per our agreement, this disambiguation notice will stay here. +This Cosmos-SDK project is not related to the [React-Cosmos](https://github.com/react-cosmos/react-cosmos) project (yet). Many thanks to Evan Coury and Ovidiu (@skidding) for this Github organization name. As per our agreement, this disambiguation notice will stay here. 
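The Makefile change above replaces the COMMIT_HASH ldflag with a VERSION string injected through `-ldflags "-X github.com/cosmos/cosmos-sdk/version.Version=${VERSION}"`. As a hedged illustration of that link-time injection pattern (not the SDK's actual version package source), the target variable just needs to be a plain package-level string:

```go
// Sketch of the -ldflags "-X" pattern used by BUILD_FLAGS; the linker can only
// overwrite a plain, package-level string variable. The example value is illustrative.
package version

// Version is empty in source and set at build time, e.g.:
//   go build -ldflags "-X github.com/cosmos/cosmos-sdk/version.Version=0.25.0-12-gdeadbee"
var Version = ""
```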
\ No newline at end of file diff --git a/baseapp/baseapp.go b/baseapp/baseapp.go index cf63f1f4d..16707cdb6 100644 --- a/baseapp/baseapp.go +++ b/baseapp/baseapp.go @@ -6,18 +6,23 @@ import ( "runtime/debug" "strings" + "github.com/hashicorp/golang-lru" "github.com/pkg/errors" - + "github.com/spf13/viper" abci "github.com/tendermint/tendermint/abci/types" + bc "github.com/tendermint/tendermint/blockchain" + cfg "github.com/tendermint/tendermint/config" "github.com/tendermint/tendermint/crypto/tmhash" cmn "github.com/tendermint/tendermint/libs/common" dbm "github.com/tendermint/tendermint/libs/db" "github.com/tendermint/tendermint/libs/log" + tmtypes "github.com/tendermint/tendermint/types" + "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/store" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/version" - "github.com/cosmos/cosmos-sdk/wire" + "github.com/cosmos/cosmos-sdk/x/auth" ) // Key to store the header in the DB itself. @@ -26,30 +31,29 @@ import ( // and to avoid affecting the Merkle root. var dbHeaderKey = []byte("header") -// Enum mode for app.runTx -type runTxMode uint8 - const ( - // Check a transaction - runTxModeCheck runTxMode = iota - // Simulate a transaction - runTxModeSimulate runTxMode = iota - // Deliver a transaction - runTxModeDeliver runTxMode = iota + // we pass txHash of current handling message via context so that we can publish it as metadata of Msg + TxHashKey = "txHash" + //this number should be around the size of the transactions in a block, TODO: configurable + TxMsgCacheSize = 4000 ) // BaseApp reflects the ABCI application implementation. type BaseApp struct { // initialized on creation - Logger log.Logger - name string // application name from abci.Info - db dbm.DB // common DB backend - cms sdk.CommitMultiStore // Main (uncached) state - router Router // handle any kind of message - codespacer *sdk.Codespacer // handle module codespacing - txDecoder sdk.TxDecoder // unmarshal []byte into sdk.Tx + Logger log.Logger + name string // application name from abci.Info + db dbm.DB // common DB backend + cms sdk.CommitMultiStore // Main (uncached) state + router Router // handle any kind of message + queryRouter QueryRouter // router for redirecting query calls + codespacer *sdk.Codespacer // handle module codespacing + isPublishAccountBalance bool + + TxDecoder sdk.TxDecoder // unmarshal []byte into sdk.Tx anteHandler sdk.AnteHandler // ante handler for fee and auth + preChecker sdk.PreChecker // may be nil initChainer sdk.InitChainer // initialize state with validators and state blob @@ -60,12 +64,15 @@ type BaseApp struct { //-------------------- // Volatile - // checkState is set on initialization and reset on Commit. - // deliverState is set in InitChain and BeginBlock and cleared on Commit. - // See methods setCheckState and setDeliverState. - checkState *state // for CheckTx - deliverState *state // for DeliverTx - signedValidators []abci.SigningValidator // absent validators from begin block + // CheckState is set on initialization and reset on Commit. + // DeliverState is set in InitChain and BeginBlock and cleared on Commit. + // See methods SetCheckState and SetDeliverState. + CheckState *state // for CheckTx + DeliverState *state // for DeliverTx + + AccountStoreCache sdk.AccountStoreCache + txMsgCache *lru.Cache + Pool *sdk.Pool // flag for sealing sealed bool @@ -80,17 +87,25 @@ var _ abci.Application = (*BaseApp)(nil) // (e.g. functional options). 
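The new `txMsgCache` field and `TxMsgCacheSize` constant above use `github.com/hashicorp/golang-lru`, keyed by the raw transaction bytes converted to a string. A minimal, self-contained sketch of that caching pattern; the stored string is a stand-in for the real `sdk.Tx` value:

```go
// Minimal sketch of the txMsgCache pattern: an LRU keyed by string(txBytes),
// caching the decoded tx so later phases can skip re-decoding.
package main

import (
	"fmt"

	"github.com/hashicorp/golang-lru"
)

func main() {
	cache, err := lru.New(4000) // mirrors TxMsgCacheSize
	if err != nil {
		panic(err)
	}

	txBytes := []byte{0x0a, 0x0b}
	cache.Add(string(txBytes), "decoded-tx") // cached after a successful decode/pre-check

	if v, ok := cache.Get(string(txBytes)); ok {
		fmt.Println("cache hit:", v) // DeliverTx can reuse the decoded tx
	}

	cache.Remove(string(txBytes)) // evicted when a check fails, forcing a re-decode
}
```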
// // NOTE: The db is used to store the version number for now. -// Accepts a user-defined txDecoder +// Accepts a user-defined TxDecoder // Accepts variable number of option functions, which act on the BaseApp to set configuration choices -func NewBaseApp(name string, logger log.Logger, db dbm.DB, txDecoder sdk.TxDecoder, options ...func(*BaseApp)) *BaseApp { +func NewBaseApp(name string, logger log.Logger, db dbm.DB, txDecoder sdk.TxDecoder, isPublish bool, options ...func(*BaseApp)) *BaseApp { + cache, err := lru.New(TxMsgCacheSize) + if err != nil { + panic(err) + } app := &BaseApp{ - Logger: logger, - name: name, - db: db, - cms: store.NewCommitMultiStore(db), - router: NewRouter(), - codespacer: sdk.NewCodespacer(), - txDecoder: txDecoder, + Logger: logger, + name: name, + db: db, + cms: store.NewCommitMultiStore(db), + router: NewRouter(), + queryRouter: NewQueryRouter(), + codespacer: sdk.NewCodespacer(), + TxDecoder: txDecoder, + isPublishAccountBalance: isPublish, + txMsgCache: cache, + Pool: new(sdk.Pool), } // Register the undefined & root codespaces, which should not be used by @@ -118,13 +133,20 @@ func (app *BaseApp) RegisterCodespace(codespace sdk.CodespaceType) sdk.Codespace return app.codespacer.RegisterNext(codespace) } -// Mount a store to the provided key in the BaseApp multistore +// Mount IAVL stores to the provided keys in the BaseApp multistore func (app *BaseApp) MountStoresIAVL(keys ...*sdk.KVStoreKey) { for _, key := range keys { app.MountStore(key, sdk.StoreTypeIAVL) } } +// Mount stores to the provided keys in the BaseApp multistore +func (app *BaseApp) MountStoresTransient(keys ...*sdk.TransientStoreKey) { + for _, key := range keys { + app.MountStore(key, sdk.StoreTypeTransient) + } +} + // Mount a store to the provided key in the BaseApp multistore, using a specified DB func (app *BaseApp) MountStoreWithDB(key sdk.StoreKey, typ sdk.StoreType, db dbm.DB) { app.cms.MountStoreWithDB(key, typ, db) @@ -135,6 +157,15 @@ func (app *BaseApp) MountStore(key sdk.StoreKey, typ sdk.StoreType) { app.cms.MountStoreWithDB(key, typ, nil) } +// only load latest multi store application version +func (app *BaseApp) LoadCMSLatestVersion() error { + err := app.cms.LoadLatestVersion() + if err != nil { + return err + } + return nil +} + // load latest application version func (app *BaseApp) LoadLatestVersion(mainKey sdk.StoreKey) error { err := app.cms.LoadLatestVersion() @@ -144,6 +175,11 @@ func (app *BaseApp) LoadLatestVersion(mainKey sdk.StoreKey) error { return app.initFromStore(mainKey) } +// InitFromStore initializes the remaining logic from app.cms +func (app *BaseApp) InitFromStore(mainKey sdk.StoreKey) error { + return app.initFromStore(mainKey) +} + // load application version func (app *BaseApp) LoadVersion(version int64, mainKey sdk.StoreKey) error { err := app.cms.LoadVersion(version) @@ -163,6 +199,22 @@ func (app *BaseApp) LastBlockHeight() int64 { return app.cms.LastCommitID().Version } +// +func (app *BaseApp) GetCommitMultiStore() sdk.CommitMultiStore { + return app.cms +} + +func LoadBlockDB() dbm.DB { + conf := cfg.DefaultConfig() + err := viper.Unmarshal(conf) + if err != nil { + panic(err) + } + + dbType := dbm.DBBackendType(conf.DBBackend) + return dbm.NewDB("blockstore", dbType, conf.DBDir()) +} + // initializes the remaining logic from app.cms func (app *BaseApp) initFromStore(mainKey sdk.StoreKey) error { // main store should exist. 
@@ -172,46 +224,81 @@ func (app *BaseApp) initFromStore(mainKey sdk.StoreKey) error { return errors.New("baseapp expects MultiStore with 'main' KVStore") } // Needed for `gaiad export`, which inits from store but never calls initchain - app.setCheckState(abci.Header{}) + appHeight := app.LastBlockHeight() + if appHeight == 0 { + app.SetCheckState(abci.Header{}) + } else { + blockDB := LoadBlockDB() + blockStore := bc.NewBlockStore(blockDB) + // note here we use appHeight, not current block store height, appHeight may be far behind storeHeight + lastHeader := blockStore.LoadBlock(appHeight).Header + app.SetCheckState(tmtypes.TM2PB.Header(&lastHeader)) + blockDB.Close() + } - app.Seal() + //TODO(#118): figure out what does this mean! If we keep this, we will get panic: Router() on sealed BaseApp at github.com/BiJie/BinanceChain/app.(*BinanceChain).GetRouter(0xc0004bc080, 0xc000c14000, 0xc0007b9808) + // /Users/zhaocong/go/src/github.com/BiJie/BinanceChain/app/app.go:297 +0x6b + //app.Seal() return nil } // NewContext returns a new Context with the correct store, the given header, and nil txBytes. -func (app *BaseApp) NewContext(isCheckTx bool, header abci.Header) sdk.Context { - if isCheckTx { - return sdk.NewContext(app.checkState.ms, header, true, app.Logger) +func (app *BaseApp) NewContext(mode sdk.RunTxMode, header abci.Header) sdk.Context { + var ms sdk.CacheMultiStore + var accountCache sdk.AccountCache + + switch mode { + case sdk.RunTxModeDeliver: + ms = app.DeliverState.ms + accountCache = app.DeliverState.AccountCache + default: + ms = app.CheckState.ms + accountCache = app.CheckState.AccountCache } - return sdk.NewContext(app.deliverState.ms, header, false, app.Logger) + return sdk.NewContext(ms, header, mode, app.Logger).WithAccountCache(accountCache) } type state struct { - ms sdk.CacheMultiStore - ctx sdk.Context + ms sdk.CacheMultiStore + AccountCache sdk.AccountCache + Ctx sdk.Context } func (st *state) CacheMultiStore() sdk.CacheMultiStore { return st.ms.CacheMultiStore() } -func (app *BaseApp) setCheckState(header abci.Header) { +func (st *state) WriteAccountCache() { + st.AccountCache.Write() +} + +func (app *BaseApp) SetCheckState(header abci.Header) { + accountCache := auth.NewAccountCache(app.AccountStoreCache) + ms := app.cms.CacheMultiStore() - app.checkState = &state{ - ms: ms, - ctx: sdk.NewContext(ms, header, true, app.Logger), + app.CheckState = &state{ + ms: ms, + AccountCache: accountCache, + Ctx: sdk.NewContext(ms, header, sdk.RunTxModeCheck, app.Logger).WithAccountCache(accountCache), } } -func (app *BaseApp) setDeliverState(header abci.Header) { +func (app *BaseApp) SetDeliverState(header abci.Header) { + accountCache := auth.NewAccountCache(app.AccountStoreCache) + ms := app.cms.CacheMultiStore() - app.deliverState = &state{ - ms: ms, - ctx: sdk.NewContext(ms, header, false, app.Logger), + app.DeliverState = &state{ + ms: ms, + AccountCache: accountCache, + Ctx: sdk.NewContext(ms, header, sdk.RunTxModeDeliver, app.Logger).WithAccountCache(accountCache), } } +func (app *BaseApp) SetAccountStoreCache(cdc *codec.Codec, accountStore sdk.KVStore, cap int) { + app.AccountStoreCache = auth.NewAccountStoreCache(cdc, accountStore, cap) +} + //______________________________________________________________________________ // ABCI @@ -237,16 +324,19 @@ func (app *BaseApp) SetOption(req abci.RequestSetOption) (res abci.ResponseSetOp // InitChain runs the initialization logic directly on the CommitMultiStore and commits it. 
func (app *BaseApp) InitChain(req abci.RequestInitChain) (res abci.ResponseInitChain) { // Initialize the deliver state and check state with ChainID and run initChain - app.setDeliverState(abci.Header{ChainID: req.ChainId}) - app.setCheckState(abci.Header{ChainID: req.ChainId}) + app.SetDeliverState(abci.Header{ChainID: req.ChainId}) + app.SetCheckState(abci.Header{ChainID: req.ChainId}) if app.initChainer == nil { return } - res = app.initChainer(app.deliverState.ctx, req) + res = app.initChainer(app.DeliverState.Ctx, req) + + // we need to write updates to underlying cache and storage + app.DeliverState.WriteAccountCache() // NOTE: we don't commit, but BeginBlock for block 1 - // starts from this deliverState + // starts from this DeliverState return } @@ -266,7 +356,8 @@ func (app *BaseApp) FilterPeerByPubKey(info string) abci.ResponseQuery { return abci.ResponseQuery{} } -func splitPath(requestPath string) (path []string) { +// Splits a string path using the delimter '/'. i.e. "this/is/funny" becomes []string{"this", "is", "funny"} +func SplitPath(requestPath string) (path []string) { path = strings.Split(requestPath, "/") // first element is empty string if len(path) > 0 && path[0] == "" { @@ -278,7 +369,7 @@ func splitPath(requestPath string) (path []string) { // Implements ABCI. // Delegates to CommitMultiStore if it implements Queryable func (app *BaseApp) Query(req abci.RequestQuery) (res abci.ResponseQuery) { - path := splitPath(req.Path) + path := SplitPath(req.Path) if len(path) == 0 { msg := "no query path provided" return sdk.ErrUnknownRequest(msg).QueryResult() @@ -291,6 +382,8 @@ func (app *BaseApp) Query(req abci.RequestQuery) (res abci.ResponseQuery) { return handleQueryStore(app, path, req) case "p2p": return handleQueryP2P(app, path, req) + case "custom": + return handleQueryCustom(app, path, req) } msg := "unknown query path" @@ -303,7 +396,7 @@ func handleQueryApp(app *BaseApp, path []string, req abci.RequestQuery) (res abc switch path[1] { case "simulate": txBytes := req.Data - tx, err := app.txDecoder(txBytes) + tx, err := app.TxDecoder(txBytes) if err != nil { result = err.Result() } else { @@ -319,7 +412,7 @@ func handleQueryApp(app *BaseApp, path []string, req abci.RequestQuery) (res abc } // Encode with json - value := wire.Cdc.MustMarshalBinary(result) + value := codec.Cdc.MustMarshalBinaryLengthPrefixed(result) return abci.ResponseQuery{ Code: uint32(sdk.ABCICodeOK), Value: value, @@ -340,6 +433,7 @@ func handleQueryStore(app *BaseApp, path []string, req abci.RequestQuery) (res a return queryable.Query(req) } +// nolint: unparam func handleQueryP2P(app *BaseApp, path []string, req abci.RequestQuery) (res abci.ResponseQuery) { // "/p2p" prefix for p2p queries if len(path) >= 4 { @@ -362,6 +456,35 @@ func handleQueryP2P(app *BaseApp, path []string, req abci.RequestQuery) (res abc return sdk.ErrUnknownRequest(msg).QueryResult() } +func handleQueryCustom(app *BaseApp, path []string, req abci.RequestQuery) (res abci.ResponseQuery) { + // path[0] should be "custom" because "/custom" prefix is required for keeper queries. + // the queryRouter routes using path[1]. 
For example, in the path "custom/gov/proposal", queryRouter routes using "gov" + if len(path) < 2 || path[1] == "" { + return sdk.ErrUnknownRequest("No route for custom query specified").QueryResult() + } + querier := app.queryRouter.Route(path[1]) + if querier == nil { + return sdk.ErrUnknownRequest(fmt.Sprintf("no custom querier found for route %s", path[1])).QueryResult() + } + + ctx := sdk.NewContext(app.cms.CacheMultiStore(), app.CheckState.Ctx.BlockHeader(), sdk.RunTxModeCheck, app.Logger) + ctx = ctx.WithAccountCache(auth.NewAccountCache(app.AccountStoreCache)) + + // Passes the rest of the path as an argument to the querier. + // For example, in the path "custom/gov/proposal/test", the gov querier gets []string{"proposal", "test"} as the path + resBytes, err := querier(ctx, path[2:], req) + if err != nil { + return abci.ResponseQuery{ + Code: uint32(err.ABCICode()), + Log: err.ABCILog(), + } + } + return abci.ResponseQuery{ + Code: uint32(sdk.ABCICodeOK), + Value: resBytes, + } +} + // BeginBlock implements the ABCI application interface. func (app *BaseApp) BeginBlock(req abci.RequestBeginBlock) (res abci.ResponseBeginBlock) { if app.cms.TracingEnabled() { @@ -372,48 +495,131 @@ func (app *BaseApp) BeginBlock(req abci.RequestBeginBlock) (res abci.ResponseBeg } // Initialize the DeliverTx state. If this is the first block, it should - // already be initialized in InitChain. Otherwise app.deliverState will be + // already be initialized in InitChain. Otherwise app.DeliverState will be // nil, since it is reset on Commit. - if app.deliverState == nil { - app.setDeliverState(req.Header) + if app.DeliverState == nil { + app.SetDeliverState(req.Header) + app.DeliverState.Ctx = app.DeliverState.Ctx.WithVoteInfos(req.LastCommitInfo.GetVotes()) } else { - // In the first block, app.deliverState.ctx will already be initialized + // In the first block, app.DeliverState.Ctx will already be initialized // by InitChain. Context is now updated with Header information. - app.deliverState.ctx = app.deliverState.ctx.WithBlockHeader(req.Header).WithBlockHeight(req.Header.Height) + app.DeliverState.Ctx = app.DeliverState.Ctx.WithBlockHeader(req.Header).WithBlockHeight(req.Header.Height) } if app.beginBlocker != nil { - res = app.beginBlocker(app.deliverState.ctx, req) + res = app.beginBlocker(app.DeliverState.Ctx, req) } - // set the signed validators for addition to context in deliverTx - // TODO: communicate this result to the address to pubkey map in slashing - app.signedValidators = req.LastCommitInfo.GetValidators() return } +//getTxFromCache returns a decoded transaction and true if found in the cache; +//otherwise return nil, false +func (app *BaseApp) GetTxFromCache(txBytes []byte) (sdk.Tx, bool) { + if i, ok := app.txMsgCache.Get(string(txBytes)); ok { + tx, o := i.(sdk.Tx) + return tx, o + } + return nil, false +} + +func (app *BaseApp) AddTxToCache(txBytes []byte, tx sdk.Tx) (evicted bool) { + return app.txMsgCache.Add(string(txBytes), tx) +} + +func (app *BaseApp) RemoveTxFromCache(txBytes []byte) { + app.txMsgCache.Remove(string(txBytes)) +} + // CheckTx implements ABCI // CheckTx runs the "basic checks" to see whether or not a transaction can possibly be executed, // first decoding, then the ante handler (which checks signatures/fees/ValidateBasic), // then finally the route match to see whether a handler exists. CheckTx does not run the actual // Msg handler function(s). func (app *BaseApp) CheckTx(txBytes []byte) (res abci.ResponseCheckTx) { - // Decode the Tx. 
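The `handleQueryCustom` branch above routes `custom/<module>/...` queries to a registered querier and passes it the remaining path segments. A sketch of a querier matching the `querier(ctx, path[2:], req)` call site; the sub-path check and echo response are purely illustrative, and registration is assumed to go through the new `QueryRouter`:

```go
// Illustrative querier for the "custom" query path. For a request path
// "custom/gov/proposal/test" it would receive []string{"proposal", "test"};
// echoing the first segment back is just a placeholder behaviour.
package example

import (
	abci "github.com/tendermint/tendermint/abci/types"

	sdk "github.com/cosmos/cosmos-sdk/types"
)

func exampleQuerier(ctx sdk.Context, path []string, req abci.RequestQuery) ([]byte, sdk.Error) {
	if len(path) == 0 {
		return nil, sdk.ErrUnknownRequest("expected a sub-path such as 'proposal'")
	}
	return []byte(path[0]), nil
}
```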
var result sdk.Result - var tx, err = app.txDecoder(txBytes) - if err != nil { - result = err.Result() + var tx sdk.Tx + // try to get the Tx first from cache, if succeed, it means it is PreChecked. + tx, ok := app.GetTxFromCache(txBytes) + if ok { + txHash := cmn.HexBytes(tmhash.Sum(txBytes)).String() + app.Logger.Debug("Handle CheckTx", "Tx", txHash) + result = app.RunTx(sdk.RunTxModeCheckAfterPre, txBytes, tx, txHash) } else { - result = app.runTx(runTxModeCheck, txBytes, tx) + tx, err := app.TxDecoder(txBytes) + if err != nil { + result = err.Result() + } else { + app.txMsgCache.Add(string(txBytes), tx) // for recheck + txHash := cmn.HexBytes(tmhash.Sum(txBytes)).String() + app.Logger.Debug("Handle CheckTx", "Tx", txHash) + result = app.RunTx(sdk.RunTxModeCheck, txBytes, tx, txHash) + } + } + + if !result.IsOK() { + app.txMsgCache.Remove(string(txBytes)) //not usable by DeliverTx + } + + return abci.ResponseCheckTx{ + Code: uint32(result.Code), + Data: result.Data, + Log: result.Log, + Tags: result.Tags, } +} +func (app *BaseApp) preCheck(txBytes []byte, mode sdk.RunTxMode) sdk.Result { + var res sdk.Result + if app.preChecker != nil && !app.txMsgCache.Contains(string(txBytes)) { + var tx, err = app.TxDecoder(txBytes) + if err != nil { + res = err.Result() + } else { + res = app.preChecker(getState(app, mode).Ctx, txBytes, tx) + if res.IsOK() { + app.txMsgCache.Add(string(txBytes), tx) + } + } + } + return res +} + +// PreCheckTx implements extended ABCI for concurrency +// PreCheckTx would perform decoding, signture and other basic verification +func (app *BaseApp) PreCheckTx(txBytes []byte) (res abci.ResponseCheckTx) { + result := app.preCheck(txBytes, sdk.RunTxModeCheck) return abci.ResponseCheckTx{ - Code: uint32(result.Code), - Data: result.Data, - Log: result.Log, - GasWanted: result.GasWanted, - GasUsed: result.GasUsed, - Tags: result.Tags, + Code: uint32(result.Code), + Data: result.Data, + Log: result.Log, + Tags: result.Tags, + } +} + +// ReCheckTx implements ABCI +// ReCheckTx runs the "minimun checks", after the inital check, +// to see whether or not a transaction can possibly be executed. +func (app *BaseApp) ReCheckTx(txBytes []byte) (res abci.ResponseCheckTx) { + // Decode the Tx. + var result sdk.Result + tx, ok := app.GetTxFromCache(txBytes) + if ok { + result = app.ReRunTx(txBytes, tx) + } else { // not suppose to enter here actually + var tx, err = app.TxDecoder(txBytes) + if err != nil { + result = err.Result() + } else { + result = app.ReRunTx(txBytes, tx) + } + } + + return abci.ResponseCheckTx{ + Code: uint32(result.Code), + Data: result.Data, + Log: result.Log, + Tags: result.Tags, } } @@ -421,11 +627,22 @@ func (app *BaseApp) CheckTx(txBytes []byte) (res abci.ResponseCheckTx) { func (app *BaseApp) DeliverTx(txBytes []byte) (res abci.ResponseDeliverTx) { // Decode the Tx. 
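`preCheck` above runs an optional `preChecker` before the decoded tx is cached, and the tests later install one via `SetPreChecker`. A hypothetical pre-checker with that signature; the 32 KiB cutoff is invented purely for illustration:

```go
// Hypothetical pre-checker matching the signature passed to SetPreChecker in the
// tests; the size limit is an invented example, not a real SDK constant.
package example

import (
	sdk "github.com/cosmos/cosmos-sdk/types"
)

func sizePreChecker(ctx sdk.Context, txBytes []byte, tx sdk.Tx) sdk.Result {
	if len(txBytes) > 32*1024 {
		return sdk.ErrInternal("tx exceeds pre-check size limit").Result()
	}
	return sdk.Result{} // empty result: OK, so the tx is added to txMsgCache
}
```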
var result sdk.Result - var tx, err = app.txDecoder(txBytes) - if err != nil { - result = err.Result() + tx, ok := app.GetTxFromCache(txBytes) //from checkTx + if ok { + // here means either the tx has passed PreDeliverTx or CheckTx, + // no need to verify signature + txHash := cmn.HexBytes(tmhash.Sum(txBytes)).String() + app.Logger.Debug("Handle DeliverTx", "Tx", txHash) + result = app.RunTx(sdk.RunTxModeDeliverAfterPre, txBytes, tx, txHash) } else { - result = app.runTx(runTxModeDeliver, txBytes, tx) + var tx, err = app.TxDecoder(txBytes) + if err != nil { + result = err.Result() + } else { + txHash := cmn.HexBytes(tmhash.Sum(txBytes)).String() + app.Logger.Debug("Handle DeliverTx", "Tx", txHash) + result = app.RunTx(sdk.RunTxModeDeliver, txBytes, tx, txHash) + } } // Even though the Result.Code is not OK, there are still effects, @@ -433,20 +650,30 @@ func (app *BaseApp) DeliverTx(txBytes []byte) (res abci.ResponseDeliverTx) { // Tell the blockchain engine (i.e. Tendermint). return abci.ResponseDeliverTx{ - Code: uint32(result.Code), - Data: result.Data, - Log: result.Log, - GasWanted: result.GasWanted, - GasUsed: result.GasUsed, - Tags: result.Tags, + Code: uint32(result.Code), + Data: result.Data, + Log: result.Log, + Tags: result.Tags, + } +} + +// PreDeliverTx implements extended ABCI for concurrency +// PreCheckTx would perform decoding, signture and other basic verification +func (app *BaseApp) PreDeliverTx(txBytes []byte) (res abci.ResponseDeliverTx) { + result := app.preCheck(txBytes, sdk.RunTxModeDeliver) + return abci.ResponseDeliverTx{ + Code: uint32(result.Code), + Data: result.Data, + Log: result.Log, + Tags: result.Tags, } } // Basic validator for msgs func validateBasicTxMsgs(msgs []sdk.Msg) sdk.Error { - if msgs == nil || len(msgs) == 0 { + if msgs == nil || len(msgs) != 1 { // TODO: probably shouldn't be ErrInternal. Maybe new ErrInvalidMessage, or ? - return sdk.ErrInternal("Tx.GetMsgs() must return at least one message in list") + return sdk.ErrInternal("Tx.GetMsgs() must return exactly one message") } for _, msg := range msgs { @@ -461,20 +688,20 @@ func validateBasicTxMsgs(msgs []sdk.Msg) sdk.Error { return nil } -func (app *BaseApp) getContextForAnte(mode runTxMode, txBytes []byte) (ctx sdk.Context) { +// retrieve the context for the ante handler and store the tx bytes; +func (app *BaseApp) getContextForAnte(mode sdk.RunTxMode, txBytes []byte) (ctx sdk.Context) { // Get the context - if mode == runTxModeCheck || mode == runTxModeSimulate { - ctx = app.checkState.ctx.WithTxBytes(txBytes) - } else { - ctx = app.deliverState.ctx.WithTxBytes(txBytes) - ctx = ctx.WithSigningValidators(app.signedValidators) + ctx = getState(app, mode).Ctx.WithTxBytes(txBytes) + // Simulate a DeliverTx + if mode == sdk.RunTxModeSimulate { + ctx = ctx.WithRunTxMode(mode) } return } // Iterates through msgs and executes them -func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (result sdk.Result) { +func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, txHash string, mode sdk.RunTxMode) (result sdk.Result) { // accumulate results logs := make([]string, 0, len(msgs)) var data []byte // NOTE: we just append them all (?!) @@ -482,20 +709,14 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (re var code sdk.ABCICodeType for msgIdx, msg := range msgs { // Match route. 
- msgType := msg.Type() - handler := app.router.Route(msgType) + msgRoute := msg.Route() + handler := app.router.Route(msgRoute) if handler == nil { - return sdk.ErrUnknownRequest("Unrecognized Msg type: " + msgType).Result() + return sdk.ErrUnknownRequest("Unrecognized Msg type: " + msgRoute).Result() } - var msgResult sdk.Result - // Skip actual execution for CheckTx - if mode != runTxModeCheck { - msgResult = handler(ctx, msg) - } - - // NOTE: GasWanted is determined by ante handler and - // GasUsed by the GasMeter + msgResult := handler(ctx.WithValue(TxHashKey, txHash).WithRunTxMode(mode), msg) + msgResult.Tags = append(msgResult.Tags, sdk.MakeTag("action", []byte(msg.Type()))) // Append Data and Tags data = append(data, msgResult.Data...) @@ -503,7 +724,7 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (re // Stop execution and return on first failed message. if !msgResult.IsOK() { - logs = append(logs, fmt.Sprintf("Msg %d failed: %s", msgIdx, msgResult.Log)) + logs = append(logs, fmt.Sprintf("%s", msgResult.Log)) code = msgResult.Code break } @@ -512,12 +733,10 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (re logs = append(logs, fmt.Sprintf("Msg %d: %s", msgIdx, msgResult.Log)) } - // Set the final gas values. result = sdk.Result{ - Code: code, - Data: data, - Log: strings.Join(logs, "\n"), - GasUsed: ctx.GasMeter().GasConsumed(), + Code: code, + Data: data, + Log: strings.Join(logs, "\n"), // TODO: FeeAmount/FeeDenom Tags: tags, } @@ -525,77 +744,153 @@ func (app *BaseApp) runMsgs(ctx sdk.Context, msgs []sdk.Msg, mode runTxMode) (re return result } -// Returns the applicantion's deliverState if app is in runTxModeDeliver, +// Returns the applicantion's DeliverState if app is in runTxModeDeliver, // otherwise it returns the application's checkstate. -func getState(app *BaseApp, mode runTxMode) *state { - if mode == runTxModeCheck || mode == runTxModeSimulate { - return app.checkState +func getState(app *BaseApp, mode sdk.RunTxMode) *state { + if mode == sdk.RunTxModeCheckAfterPre || + mode == sdk.RunTxModeCheck || + mode == sdk.RunTxModeSimulate || + mode == sdk.RunTxModeReCheck { + return app.CheckState } - return app.deliverState + return app.DeliverState +} + +// Returns AccountCache of CheckState or DeliverState according to the tx mode +func getAccountCache(app *BaseApp, mode sdk.RunTxMode) sdk.AccountCache { + if mode == sdk.RunTxModeCheckAfterPre || + mode == sdk.RunTxModeCheck || + mode == sdk.RunTxModeSimulate || + mode == sdk.RunTxModeReCheck { + return app.CheckState.AccountCache + } + + return app.DeliverState.AccountCache +} + +func (app *BaseApp) initializeContext(ctx sdk.Context, mode sdk.RunTxMode) sdk.Context { + if mode == sdk.RunTxModeSimulate { + ctx = ctx.WithMultiStore(getState(app, mode).CacheMultiStore()). + WithAccountCache(getAccountCache(app, mode).Cache()) + } + return ctx } -// runTx processes a transaction. The transactions is proccessed via an +// RunTx processes a transaction. The transactions is proccessed via an // anteHandler. txBytes may be nil in some cases, eg. in tests. Also, in the // future we may support "internal" transactions. -func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (result sdk.Result) { - // NOTE: GasWanted should be returned by the AnteHandler. GasUsed is - // determined by the GasMeter. 
We need access to the context to get the gas +func (app *BaseApp) RunTx(mode sdk.RunTxMode, txBytes []byte, tx sdk.Tx, txHash string) (result sdk.Result) { // meter so we initialize upfront. - var gasWanted int64 + var msCache sdk.CacheMultiStore ctx := app.getContextForAnte(mode, txBytes) + ctx = app.initializeContext(ctx, mode) defer func() { if r := recover(); r != nil { - switch rType := r.(type) { - case sdk.ErrorOutOfGas: - log := fmt.Sprintf("out of gas in location: %v", rType.Descriptor) - result = sdk.ErrOutOfGas(log).Result() - default: - log := fmt.Sprintf("recovered: %v\nstack:\n%v", r, string(debug.Stack())) - result = sdk.ErrInternal(log).Result() - } + log := fmt.Sprintf("recovered: %v\nstack:\n%v", r, string(debug.Stack())) + result = sdk.ErrInternal(log).Result() } - result.GasWanted = gasWanted - result.GasUsed = ctx.GasMeter().GasConsumed() }() var msgs = tx.GetMsgs() - - err := validateBasicTxMsgs(msgs) - if err != nil { + if err := validateBasicTxMsgs(msgs); err != nil { return err.Result() } // run the ante handler if app.anteHandler != nil { - newCtx, result, abort := app.anteHandler(ctx, tx) + newCtx, result, abort := app.anteHandler(ctx.WithValue(TxHashKey, txHash), tx, mode) if abort { return result } if !newCtx.IsZero() { ctx = newCtx } + } + + if mode == sdk.RunTxModeSimulate { + result = app.runMsgs(ctx, msgs, txHash, mode) + return + } + + // Keep the state in a transient CacheWrap in case processing the messages + // fails. + msCache = getState(app, mode).CacheMultiStore() + if msCache.TracingEnabled() { + msCache = msCache.WithTracingContext(sdk.TraceContext( + map[string]interface{}{"txHash": txHash}, + )).(sdk.CacheMultiStore) + } + + accountCache := getAccountCache(app, mode).Cache() + + ctx = ctx.WithMultiStore(msCache) + ctx = ctx.WithAccountCache(accountCache) + result = app.runMsgs(ctx, msgs, txHash, mode) + + // only update state if all messages pass + if result.IsOK() { + if (mode == sdk.RunTxModeDeliver || mode == sdk.RunTxModeDeliverAfterPre) && app.isPublishAccountBalance { + app.Pool.AddAddrs(msgs[0].GetInvolvedAddresses()) + } + accountCache.Write() + msCache.Write() + } + + return +} - gasWanted = result.GasWanted +// RunTx processes a transaction. The transactions is proccessed via an +// anteHandler. txBytes may be nil in some cases, eg. in tests. Also, in the +// future we may support "internal" transactions. +func (app *BaseApp) ReRunTx(txBytes []byte, tx sdk.Tx) (result sdk.Result) { + // meter so we initialize upfront. + var msCache sdk.CacheMultiStore + mode := sdk.RunTxModeReCheck + ctx := app.getContextForAnte(mode, txBytes) + + defer func() { + if r := recover(); r != nil { + log := fmt.Sprintf("recovered: %v\nstack:\n%v", r, string(debug.Stack())) + result = sdk.ErrInternal(log).Result() + } + + }() + + txHash := cmn.HexBytes(tmhash.Sum(txBytes)).String() + + // run the ante handler + if app.anteHandler != nil { + newCtx, result, abort := app.anteHandler(ctx.WithValue(TxHashKey, txHash), tx, mode) + if abort { + return result + } + if !newCtx.IsZero() { + ctx = newCtx + } } // Keep the state in a transient CacheWrap in case processing the messages // fails. 
- msCache := getState(app, mode).CacheMultiStore() + msCache = getState(app, mode).CacheMultiStore() if msCache.TracingEnabled() { msCache = msCache.WithTracingContext(sdk.TraceContext( - map[string]interface{}{"txHash": cmn.HexBytes(tmhash.Sum(txBytes)).String()}, + map[string]interface{}{"txHash": txHash}, )).(sdk.CacheMultiStore) } + accountCache := getAccountCache(app, mode).Cache() + ctx = ctx.WithMultiStore(msCache) - result = app.runMsgs(ctx, msgs, mode) - result.GasWanted = gasWanted + ctx = ctx.WithAccountCache(accountCache) + var msgs = tx.GetMsgs() + result = app.runMsgs(ctx, msgs, txHash, mode) - // only update state if all messages pass and we're not in a simulation - if result.IsOK() && mode != runTxModeSimulate { + // only update state if all messages pass + if result.IsOK() { + accountCache.Write() msCache.Write() } @@ -604,12 +899,12 @@ func (app *BaseApp) runTx(mode runTxMode, txBytes []byte, tx sdk.Tx) (result sdk // EndBlock implements the ABCI application interface. func (app *BaseApp) EndBlock(req abci.RequestEndBlock) (res abci.ResponseEndBlock) { - if app.deliverState.ms.TracingEnabled() { - app.deliverState.ms = app.deliverState.ms.ResetTraceContext().(sdk.CacheMultiStore) + if app.DeliverState.ms.TracingEnabled() { + app.DeliverState.ms = app.DeliverState.ms.ResetTraceContext().(sdk.CacheMultiStore) } if app.endBlocker != nil { - res = app.endBlocker(app.deliverState.ctx, req) + res = app.endBlocker(app.DeliverState.Ctx, req) } return @@ -617,7 +912,7 @@ func (app *BaseApp) EndBlock(req abci.RequestEndBlock) (res abci.ResponseEndBloc // Implements ABCI func (app *BaseApp) Commit() (res abci.ResponseCommit) { - header := app.deliverState.ctx.BlockHeader() + header := app.DeliverState.Ctx.BlockHeader() /* // Write the latest Header to the store headerBytes, err := proto.Marshal(&header) @@ -628,7 +923,8 @@ func (app *BaseApp) Commit() (res abci.ResponseCommit) { */ // Write the Deliver state and commit the MultiStore - app.deliverState.ms.Write() + app.DeliverState.WriteAccountCache() + app.DeliverState.ms.Write() commitID := app.cms.Commit() // TODO: this is missing a module identifier and dumps byte array app.Logger.Debug("Commit synced", @@ -638,10 +934,11 @@ func (app *BaseApp) Commit() (res abci.ResponseCommit) { // Reset the Check state to the latest committed // NOTE: safe because Tendermint holds a lock on the mempool for Commit. // Use the header from this latest block. - app.setCheckState(header) + app.SetCheckState(header) // Empty the Deliver state - app.deliverState = nil + app.DeliverState = nil + app.Pool.ClearTxRelatedAddrs() return abci.ResponseCommit{ Data: commitID.Hash, diff --git a/baseapp/baseapp_test.go b/baseapp/baseapp_test.go index 269424e58..c2953cf2c 100644 --- a/baseapp/baseapp_test.go +++ b/baseapp/baseapp_test.go @@ -3,6 +3,7 @@ package baseapp import ( "bytes" "encoding/binary" + "errors" "fmt" "os" "testing" @@ -14,8 +15,8 @@ import ( dbm "github.com/tendermint/tendermint/libs/db" "github.com/tendermint/tendermint/libs/log" + "github.com/cosmos/cosmos-sdk/codec" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/wire" ) var ( @@ -34,14 +35,14 @@ func defaultLogger() log.Logger { func newBaseApp(name string, options ...func(*BaseApp)) *BaseApp { logger := defaultLogger() db := dbm.NewMemDB() - codec := wire.NewCodec() + codec := codec.New() registerTestCodec(codec) - return NewBaseApp(name, logger, db, testTxDecoder(codec), options...) + return NewBaseApp(name, logger, db, testTxDecoder(codec), false, options...) 
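The surrounding test changes replace the old `wire` package with `codec` and switch to length-prefixed amino marshalling. A small, self-contained round-trip using the same calls (`codec.New`, `RegisterConcrete`, `MarshalBinaryLengthPrefixed`); the `dog` type and its registration name are made up for this example:

```go
// Round-trip sketch of the codec (formerly wire) calls used by the updated tests.
package main

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/codec"
)

type dog struct {
	Name string
}

func main() {
	cdc := codec.New()
	cdc.RegisterConcrete(&dog{}, "example/dog", nil)

	bz, err := cdc.MarshalBinaryLengthPrefixed(&dog{Name: "rex"})
	if err != nil {
		panic(err)
	}

	var d dog
	if err := cdc.UnmarshalBinaryLengthPrefixed(bz, &d); err != nil {
		panic(err)
	}
	fmt.Println(d.Name) // rex
}
```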
} -func registerTestCodec(cdc *wire.Codec) { +func registerTestCodec(cdc *codec.Codec) { // register Tx, Msg - sdk.RegisterWire(cdc) + sdk.RegisterCodec(cdc) // register test types cdc.RegisterConcrete(&txTest{}, "cosmos-sdk/baseapp/txTest", nil) @@ -66,6 +67,44 @@ func setupBaseApp(t *testing.T, options ...func(*BaseApp)) *BaseApp { return app } +type MockBaseApp struct { + *BaseApp +} + +// The following methods just keep same logic with original cosmos code before we fork to make test can pass +func (app *MockBaseApp) LoadLatestVersion(mainKey sdk.StoreKey) error { + err := app.cms.LoadLatestVersion() + if err != nil { + return err + } + return app.initFromStore(mainKey) +} + +func (app *MockBaseApp) LoadVersion(version int64, mainKey sdk.StoreKey) error { + err := app.cms.LoadVersion(version) + if err != nil { + return err + } + return app.initFromStore(mainKey) +} + +func (app *MockBaseApp) initFromStore(mainKey sdk.StoreKey) error { + // main store should exist. + // TODO: we don't actually need the main store here + main := app.cms.GetKVStore(mainKey) + if main == nil { + return errors.New("baseapp expects MultiStore with 'main' KVStore") + } + + app.SetCheckState(abci.Header{}) + + return nil +} + +func NewMockBaseApp(name string, logger log.Logger, db dbm.DB, txDecoder sdk.TxDecoder, isPublish bool, options ...func(*BaseApp)) *MockBaseApp { + return &MockBaseApp{NewBaseApp(name, logger, db, txDecoder, isPublish, options...)} +} + //------------------------------------------------------------------------------------------ // test mounting and loading stores @@ -85,7 +124,7 @@ func TestLoadVersion(t *testing.T) { logger := defaultLogger() db := dbm.NewMemDB() name := t.Name() - app := NewBaseApp(name, logger, db, nil) + app := NewMockBaseApp(name, logger, db, nil, false) // make a cap key and mount the store capKey := sdk.NewKVStoreKey("main") @@ -114,7 +153,7 @@ func TestLoadVersion(t *testing.T) { commitID2 := sdk.CommitID{2, res.Data} // reload with LoadLatestVersion - app = NewBaseApp(name, logger, db, nil) + app = NewMockBaseApp(name, logger, db, nil, false) app.MountStoresIAVL(capKey) err = app.LoadLatestVersion(capKey) require.Nil(t, err) @@ -122,7 +161,7 @@ func TestLoadVersion(t *testing.T) { // reload with LoadVersion, see if you can commit the same block and get // the same result - app = NewBaseApp(name, logger, db, nil) + app = NewMockBaseApp(name, logger, db, nil, false) app.MountStoresIAVL(capKey) err = app.LoadVersion(1, capKey) require.Nil(t, err) @@ -132,7 +171,7 @@ func TestLoadVersion(t *testing.T) { testLoadVersionHelper(t, app, int64(2), commitID2) } -func testLoadVersionHelper(t *testing.T, app *BaseApp, expectedHeight int64, expectedID sdk.CommitID) { +func testLoadVersionHelper(t *testing.T, app *MockBaseApp, expectedHeight int64, expectedID sdk.CommitID) { lastHeight := app.LastBlockHeight() lastID := app.LastCommitID() require.Equal(t, expectedHeight, lastHeight) @@ -142,7 +181,7 @@ func testLoadVersionHelper(t *testing.T, app *BaseApp, expectedHeight int64, exp func TestOptionFunction(t *testing.T) { logger := defaultLogger() db := dbm.NewMemDB() - bap := NewBaseApp("starting name", logger, db, nil, testChangeNameHelper("new name")) + bap := NewMockBaseApp("starting name", logger, db, nil, false, testChangeNameHelper("new name")) require.Equal(t, bap.name, "new name", "BaseApp should have had name changed via option function") } @@ -214,7 +253,7 @@ func TestInitChainer(t *testing.T) { // we can reload the same app later db := dbm.NewMemDB() logger := 
defaultLogger() - app := NewBaseApp(name, logger, db, nil) + app := NewMockBaseApp(name, logger, db, nil, false) capKey := sdk.NewKVStoreKey("main") capKey2 := sdk.NewKVStoreKey("key2") app.MountStoresIAVL(capKey, capKey2) @@ -247,18 +286,18 @@ func TestInitChainer(t *testing.T) { app.InitChain(abci.RequestInitChain{AppStateBytes: []byte("{}"), ChainId: "test-chain-id"}) // must have valid JSON genesis file, even if empty // assert that chainID is set correctly in InitChain - chainID := app.deliverState.ctx.ChainID() - require.Equal(t, "test-chain-id", chainID, "ChainID in deliverState not set correctly in InitChain") + chainID := app.DeliverState.Ctx.ChainID() + require.Equal(t, "test-chain-id", chainID, "ChainID in DeliverState not set correctly in InitChain") - chainID = app.checkState.ctx.ChainID() - require.Equal(t, "test-chain-id", chainID, "ChainID in checkState not set correctly in InitChain") + chainID = app.CheckState.Ctx.ChainID() + require.Equal(t, "test-chain-id", chainID, "ChainID in CheckState not set correctly in InitChain") app.Commit() res = app.Query(query) require.Equal(t, value, res.Value) // reload app - app = NewBaseApp(name, logger, db, nil) + app = NewMockBaseApp(name, logger, db, nil, false) app.SetInitChainer(initChainer) app.MountStoresIAVL(capKey, capKey2) err = app.LoadLatestVersion(capKey) // needed to make stores non-nil @@ -290,8 +329,8 @@ type txTest struct { func (tx txTest) GetMsgs() []sdk.Msg { return tx.Msgs } const ( - typeMsgCounter = "msgCounter" - typeMsgCounter2 = "msgCounter2" + routeMsgCounter = "msgCounter" + routeMsgCounter2 = "msgCounter2" ) // ValidateBasic() fails on negative counters. @@ -301,7 +340,8 @@ type msgCounter struct { } // Implements Msg -func (msg msgCounter) Type() string { return typeMsgCounter } +func (msg msgCounter) Route() string { return routeMsgCounter } +func (msg msgCounter) Type() string { return "counter1" } func (msg msgCounter) GetSignBytes() []byte { return nil } func (msg msgCounter) GetSigners() []sdk.AccAddress { return nil } func (msg msgCounter) ValidateBasic() sdk.Error { @@ -310,6 +350,9 @@ func (msg msgCounter) ValidateBasic() sdk.Error { } return sdk.ErrInvalidSequence("counter should be a non-negative integer.") } +func (msg msgCounter) GetInvolvedAddresses() []sdk.AccAddress { + return msg.GetSigners() +} func newTxCounter(txInt int64, msgInts ...int64) *txTest { var msgs []sdk.Msg @@ -324,14 +367,14 @@ type msgNoRoute struct { msgCounter } -func (tx msgNoRoute) Type() string { return "noroute" } +func (tx msgNoRoute) Route() string { return "noroute" } // a msg we dont know how to decode type msgNoDecode struct { msgCounter } -func (tx msgNoDecode) Type() string { return typeMsgCounter } +func (tx msgNoDecode) Route() string { return routeMsgCounter } // Another counter msg. 
Duplicate of msgCounter type msgCounter2 struct { @@ -339,7 +382,8 @@ type msgCounter2 struct { } // Implements Msg -func (msg msgCounter2) Type() string { return typeMsgCounter2 } +func (msg msgCounter2) Route() string { return routeMsgCounter2 } +func (msg msgCounter2) Type() string { return "counter2" } func (msg msgCounter2) GetSignBytes() []byte { return nil } func (msg msgCounter2) GetSigners() []sdk.AccAddress { return nil } func (msg msgCounter2) ValidateBasic() sdk.Error { @@ -348,15 +392,18 @@ func (msg msgCounter2) ValidateBasic() sdk.Error { } return sdk.ErrInvalidSequence("counter should be a non-negative integer.") } +func (msg msgCounter2) GetInvolvedAddresses() []sdk.AccAddress { + return msg.GetSigners() +} // amino decode -func testTxDecoder(cdc *wire.Codec) sdk.TxDecoder { +func testTxDecoder(cdc *codec.Codec) sdk.TxDecoder { return func(txBytes []byte) (sdk.Tx, sdk.Error) { var tx txTest if len(txBytes) == 0 { return nil, sdk.ErrTxDecode("txBytes are empty") } - err := cdc.UnmarshalBinary(txBytes, &tx) + err := cdc.UnmarshalBinaryLengthPrefixed(txBytes, &tx) if err != nil { return nil, sdk.ErrTxDecode("").TraceSDK(err.Error()) } @@ -365,7 +412,7 @@ func testTxDecoder(cdc *wire.Codec) sdk.TxDecoder { } func anteHandlerTxTest(t *testing.T, capKey *sdk.KVStoreKey, storeKey []byte) sdk.AnteHandler { - return func(ctx sdk.Context, tx sdk.Tx) (newCtx sdk.Context, res sdk.Result, abort bool) { + return func(ctx sdk.Context, tx sdk.Tx, mode sdk.RunTxMode) (newCtx sdk.Context, res sdk.Result, abort bool) { store := ctx.KVStore(capKey) msgCounter := tx.(txTest).Counter res = incrementingCounter(t, store, storeKey, msgCounter) @@ -438,7 +485,7 @@ func TestCheckTx(t *testing.T) { anteOpt := func(bapp *BaseApp) { bapp.SetAnteHandler(anteHandlerTxTest(t, capKey1, counterKey)) } routerOpt := func(bapp *BaseApp) { // TODO: can remove this once CheckTx doesnt process msgs. 
- bapp.Router().AddRoute(typeMsgCounter, func(ctx sdk.Context, msg sdk.Msg) sdk.Result { return sdk.Result{} }) + bapp.Router().AddRoute(routeMsgCounter, func(ctx sdk.Context, msg sdk.Msg) sdk.Result { return sdk.Result{} }) } app := setupBaseApp(t, anteOpt, routerOpt) @@ -447,19 +494,19 @@ func TestCheckTx(t *testing.T) { app.InitChain(abci.RequestInitChain{}) - // Create same codec used in txDecoder - codec := wire.NewCodec() + // Create same codec used in TxDecoder + codec := codec.New() registerTestCodec(codec) for i := int64(0); i < nTxs; i++ { tx := newTxCounter(i, 0) - txBytes, err := codec.MarshalBinary(tx) + txBytes, err := codec.MarshalBinaryLengthPrefixed(tx) require.NoError(t, err) r := app.CheckTx(txBytes) assert.True(t, r.IsOK(), fmt.Sprintf("%v", r)) } - checkStateStore := app.checkState.ctx.KVStore(capKey1) + checkStateStore := app.CheckState.Ctx.KVStore(capKey1) storedCounter := getIntFromStore(checkStateStore, counterKey) // Ensure AnteHandler ran @@ -470,7 +517,7 @@ func TestCheckTx(t *testing.T) { app.EndBlock(abci.RequestEndBlock{}) app.Commit() - checkStateStore = app.checkState.ctx.KVStore(capKey1) + checkStateStore = app.CheckState.Ctx.KVStore(capKey1) storedBytes := checkStateStore.Get(counterKey) require.Nil(t, storedBytes) } @@ -484,12 +531,14 @@ func TestDeliverTx(t *testing.T) { // test increments in the handler deliverKey := []byte("deliver-key") - routerOpt := func(bapp *BaseApp) { bapp.Router().AddRoute(typeMsgCounter, handlerMsgCounter(t, capKey1, deliverKey)) } + routerOpt := func(bapp *BaseApp) { + bapp.Router().AddRoute(routeMsgCounter, handlerMsgCounter(t, capKey1, deliverKey)) + } app := setupBaseApp(t, anteOpt, routerOpt) - // Create same codec used in txDecoder - codec := wire.NewCodec() + // Create same codec used in TxDecoder + codec := codec.New() registerTestCodec(codec) nBlocks := 3 @@ -499,7 +548,7 @@ func TestDeliverTx(t *testing.T) { for i := 0; i < txPerHeight; i++ { counter := int64(blockN*txPerHeight + i) tx := newTxCounter(counter, counter) - txBytes, err := codec.MarshalBinary(tx) + txBytes, err := codec.MarshalBinaryLengthPrefixed(tx) require.NoError(t, err) res := app.DeliverTx(txBytes) require.True(t, res.IsOK(), fmt.Sprintf("%v", res)) @@ -525,86 +574,96 @@ func TestMultiMsgDeliverTx(t *testing.T) { deliverKey := []byte("deliver-key") deliverKey2 := []byte("deliver-key2") routerOpt := func(bapp *BaseApp) { - bapp.Router().AddRoute(typeMsgCounter, handlerMsgCounter(t, capKey1, deliverKey)) - bapp.Router().AddRoute(typeMsgCounter2, handlerMsgCounter(t, capKey1, deliverKey2)) + bapp.Router().AddRoute(routeMsgCounter, handlerMsgCounter(t, capKey1, deliverKey)) + bapp.Router().AddRoute(routeMsgCounter2, handlerMsgCounter(t, capKey1, deliverKey2)) } app := setupBaseApp(t, anteOpt, routerOpt) - // Create same codec used in txDecoder - codec := wire.NewCodec() + // Create same codec used in TxDecoder + codec := codec.New() registerTestCodec(codec) // run a multi-msg tx - // with all msgs the same type + // with all msgs the same route { app.BeginBlock(abci.RequestBeginBlock{}) tx := newTxCounter(0, 0, 1, 2) - txBytes, err := codec.MarshalBinary(tx) + txBytes, err := codec.MarshalBinaryLengthPrefixed(tx) require.NoError(t, err) res := app.DeliverTx(txBytes) - require.True(t, res.IsOK(), fmt.Sprintf("%v", res)) + require.True(t, res.IsErr(), fmt.Sprintf("%v", res)) + } +} - store := app.deliverState.ctx.KVStore(capKey1) +// Interleave calls to Check and Deliver and ensure +// that there is no cross-talk. 
Check sees results of the previous Check calls +// and Deliver sees that of the previous Deliver calls, but they don't see eachother. +func TestConcurrentCheckDeliver(t *testing.T) { + // TODO +} - // tx counter only incremented once - txCounter := getIntFromStore(store, anteKey) - require.Equal(t, int64(1), txCounter) +func TestPreCheckTx(t *testing.T) { + counterKey := []byte("counter-key") - // msg counter incremented three times - msgCounter := getIntFromStore(store, deliverKey) - require.Equal(t, int64(3), msgCounter) + anteOpt := func(bapp *BaseApp) { bapp.SetAnteHandler(anteHandlerTxTest(t, capKey1, counterKey)) } + routerOpt := func(bapp *BaseApp) { + // TODO: can remove this once CheckTx doesnt process msgs. + bapp.Router().AddRoute(routeMsgCounter, func(ctx sdk.Context, msg sdk.Msg) sdk.Result { return sdk.Result{} }) } - // replace the second message with a msgCounter2 - { - tx := newTxCounter(1, 3) - tx.Msgs = append(tx.Msgs, msgCounter2{0}) - tx.Msgs = append(tx.Msgs, msgCounter2{1}) - txBytes, err := codec.MarshalBinary(tx) - require.NoError(t, err) - res := app.DeliverTx(txBytes) - require.True(t, res.IsOK(), fmt.Sprintf("%v", res)) + app := setupBaseApp(t, anteOpt, routerOpt) + app.SetPreChecker(func(ctx sdk.Context, txBytes []byte, tx sdk.Tx) sdk.Result { + return sdk.ErrInternal("Must Fail").Result() + }) + nTxs := int64(5) - store := app.deliverState.ctx.KVStore(capKey1) + app.InitChain(abci.RequestInitChain{}) - // tx counter only incremented once - txCounter := getIntFromStore(store, anteKey) - require.Equal(t, int64(2), txCounter) + // Create same codec used in TxDecoder + codec := codec.New() + registerTestCodec(codec) - // original counter increments by one - // new counter increments by two - msgCounter := getIntFromStore(store, deliverKey) - require.Equal(t, int64(4), msgCounter) - msgCounter2 := getIntFromStore(store, deliverKey2) - require.Equal(t, int64(2), msgCounter2) + for i := int64(0); i < nTxs; i++ { + tx := newTxCounter(i, 0) + txBytes, err := codec.MarshalBinaryLengthPrefixed(tx) + require.NoError(t, err) + r := app.PreCheckTx(txBytes) + assert.False(t, r.IsOK(), fmt.Sprintf("%v", r)) } -} -// Interleave calls to Check and Deliver and ensure -// that there is no cross-talk. Check sees results of the previous Check calls -// and Deliver sees that of the previous Deliver calls, but they don't see eachother. -func TestConcurrentCheckDeliver(t *testing.T) { - // TODO + checkStateStore := app.CheckState.Ctx.KVStore(capKey1) + storedCounter := getIntFromStore(checkStateStore, counterKey) + + // Ensure AnteHandler ran + require.NotEqual(t, nTxs, storedCounter) + assert.Equal(t, 0, app.txMsgCache.Len()) + + app.SetPreChecker(func(ctx sdk.Context, txBytes []byte, tx sdk.Tx) sdk.Result { + return sdk.Result{} + }) + + tx := newTxCounter(0, 0) + txBytes, _ := codec.MarshalBinaryLengthPrefixed(tx) + r := app.PreCheckTx(txBytes) + assert.True(t, r.IsOK(), fmt.Sprintf("%v", r)) + assert.Equal(t, 1, app.txMsgCache.Len()) } -// Simulate a transaction that uses gas to compute the gas. // Simulate() and Query("/app/simulate", txBytes) should give // the same results. 
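The comment above promises that Simulate() and the "/app/simulate" ABCI query path return the same result. A minimal sketch of driving the query path directly, assuming an already wired-up BaseApp and the same length-prefixed test codec; the helper name simulateViaQuery is illustrative and not part of this change:

```go
package baseapp

import (
	"github.com/cosmos/cosmos-sdk/codec"
	sdk "github.com/cosmos/cosmos-sdk/types"
	abci "github.com/tendermint/tendermint/abci/types"
)

// simulateViaQuery mirrors what the simulate test does: encode the tx with the
// length-prefixed amino codec, push it through the "/app/simulate" query path,
// and decode the sdk.Result carried in the response value.
func simulateViaQuery(app *BaseApp, cdc *codec.Codec, tx sdk.Tx) (sdk.Result, error) {
	txBytes, err := cdc.MarshalBinaryLengthPrefixed(tx)
	if err != nil {
		return sdk.Result{}, err
	}

	queryResult := app.Query(abci.RequestQuery{
		Path: "/app/simulate",
		Data: txBytes,
	})

	var res sdk.Result
	cdc.MustUnmarshalBinaryLengthPrefixed(queryResult.Value, &res)
	return res, nil
}
```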
func TestSimulateTx(t *testing.T) { - gasConsumed := int64(5) anteOpt := func(bapp *BaseApp) { - bapp.SetAnteHandler(func(ctx sdk.Context, tx sdk.Tx) (newCtx sdk.Context, res sdk.Result, abort bool) { - newCtx = ctx.WithGasMeter(sdk.NewGasMeter(gasConsumed)) + bapp.SetAnteHandler(func(ctx sdk.Context, tx sdk.Tx, mode sdk.RunTxMode) (newCtx sdk.Context, res sdk.Result, abort bool) { + newCtx = ctx return }) } routerOpt := func(bapp *BaseApp) { - bapp.Router().AddRoute(typeMsgCounter, func(ctx sdk.Context, msg sdk.Msg) sdk.Result { - ctx.GasMeter().ConsumeGas(gasConsumed, "test") - return sdk.Result{GasUsed: ctx.GasMeter().GasConsumed()} + bapp.Router().AddRoute(routeMsgCounter, func(ctx sdk.Context, msg sdk.Msg) sdk.Result { + return sdk.Result{} }) } @@ -612,9 +671,9 @@ func TestSimulateTx(t *testing.T) { app.InitChain(abci.RequestInitChain{}) - // Create same codec used in txDecoder - codec := wire.NewCodec() - registerTestCodec(codec) + // Create same codec used in TxDecoder + cdc := codec.New() + registerTestCodec(cdc) nBlocks := 3 for blockN := 0; blockN < nBlocks; blockN++ { @@ -623,18 +682,16 @@ func TestSimulateTx(t *testing.T) { tx := newTxCounter(count, count) - // simulate a message, check gas reported + // simulate a message result := app.Simulate(tx) require.True(t, result.IsOK(), result.Log) - require.Equal(t, int64(gasConsumed), result.GasUsed) // simulate again, same result result = app.Simulate(tx) require.True(t, result.IsOK(), result.Log) - require.Equal(t, int64(gasConsumed), result.GasUsed) // simulate by calling Query with encoded tx - txBytes, err := codec.MarshalBinary(tx) + txBytes, err := cdc.MarshalBinaryLengthPrefixed(tx) require.Nil(t, err) query := abci.RequestQuery{ Path: "/app/simulate", @@ -644,265 +701,10 @@ func TestSimulateTx(t *testing.T) { require.True(t, queryResult.IsOK(), queryResult.Log) var res sdk.Result - wire.Cdc.MustUnmarshalBinary(queryResult.Value, &res) + codec.Cdc.MustUnmarshalBinaryLengthPrefixed(queryResult.Value, &res) require.Nil(t, err, "Result unmarshalling failed") require.True(t, res.IsOK(), res.Log) - require.Equal(t, gasConsumed, res.GasUsed, res.Log) app.EndBlock(abci.RequestEndBlock{}) app.Commit() } } - -//------------------------------------------------------------------------------------------- -// Tx failure cases -// TODO: add more - -func TestRunInvalidTransaction(t *testing.T) { - anteOpt := func(bapp *BaseApp) { - bapp.SetAnteHandler(func(ctx sdk.Context, tx sdk.Tx) (newCtx sdk.Context, res sdk.Result, abort bool) { return }) - } - routerOpt := func(bapp *BaseApp) { - bapp.Router().AddRoute(typeMsgCounter, func(ctx sdk.Context, msg sdk.Msg) (res sdk.Result) { return }) - } - - app := setupBaseApp(t, anteOpt, routerOpt) - app.BeginBlock(abci.RequestBeginBlock{}) - - // Transaction with no messages - { - emptyTx := &txTest{} - err := app.Deliver(emptyTx) - require.Equal(t, sdk.ToABCICode(sdk.CodespaceRoot, sdk.CodeInternal), err.Code) - } - - // Transaction where ValidateBasic fails - { - testCases := []struct { - tx *txTest - fail bool - }{ - {newTxCounter(0, 0), false}, - {newTxCounter(-1, 0), false}, - {newTxCounter(100, 100), false}, - {newTxCounter(100, 5, 4, 3, 2, 1), false}, - - {newTxCounter(0, -1), true}, - {newTxCounter(0, 1, -2), true}, - {newTxCounter(0, 1, 2, -10, 5), true}, - } - - for _, testCase := range testCases { - tx := testCase.tx - res := app.Deliver(tx) - if testCase.fail { - require.Equal(t, sdk.ToABCICode(sdk.CodespaceRoot, sdk.CodeInvalidSequence), res.Code) - } else { - require.True(t, 
res.IsOK(), fmt.Sprintf("%v", res)) - } - } - } - - // Transaction with no known route - { - unknownRouteTx := txTest{[]sdk.Msg{msgNoRoute{}}, 0} - err := app.Deliver(unknownRouteTx) - require.Equal(t, sdk.ToABCICode(sdk.CodespaceRoot, sdk.CodeUnknownRequest), err.Code) - - unknownRouteTx = txTest{[]sdk.Msg{msgCounter{}, msgNoRoute{}}, 0} - err = app.Deliver(unknownRouteTx) - require.Equal(t, sdk.ToABCICode(sdk.CodespaceRoot, sdk.CodeUnknownRequest), err.Code) - } - - // Transaction with an unregistered message - { - tx := newTxCounter(0, 0) - tx.Msgs = append(tx.Msgs, msgNoDecode{}) - - // new codec so we can encode the tx, but we shouldn't be able to decode - newCdc := wire.NewCodec() - registerTestCodec(newCdc) - newCdc.RegisterConcrete(&msgNoDecode{}, "cosmos-sdk/baseapp/msgNoDecode", nil) - - txBytes, err := newCdc.MarshalBinary(tx) - require.NoError(t, err) - res := app.DeliverTx(txBytes) - require.EqualValues(t, sdk.ToABCICode(sdk.CodespaceRoot, sdk.CodeTxDecode), res.Code) - } -} - -// Test that transactions exceeding gas limits fail -func TestTxGasLimits(t *testing.T) { - gasGranted := int64(10) - anteOpt := func(bapp *BaseApp) { - bapp.SetAnteHandler(func(ctx sdk.Context, tx sdk.Tx) (newCtx sdk.Context, res sdk.Result, abort bool) { - newCtx = ctx.WithGasMeter(sdk.NewGasMeter(gasGranted)) - - // NOTE/TODO/XXX: - // AnteHandlers must have their own defer/recover in order - // for the BaseApp to know how much gas was used used! - // This is because the GasMeter is created in the AnteHandler, - // but if it panics the context won't be set properly in runTx's recover ... - defer func() { - if r := recover(); r != nil { - switch rType := r.(type) { - case sdk.ErrorOutOfGas: - log := fmt.Sprintf("out of gas in location: %v", rType.Descriptor) - res = sdk.ErrOutOfGas(log).Result() - res.GasWanted = gasGranted - res.GasUsed = newCtx.GasMeter().GasConsumed() - default: - panic(r) - } - } - }() - - count := tx.(*txTest).Counter - newCtx.GasMeter().ConsumeGas(count, "counter-ante") - res = sdk.Result{ - GasWanted: gasGranted, - } - return - }) - - } - - routerOpt := func(bapp *BaseApp) { - bapp.Router().AddRoute(typeMsgCounter, func(ctx sdk.Context, msg sdk.Msg) sdk.Result { - count := msg.(msgCounter).Counter - ctx.GasMeter().ConsumeGas(count, "counter-handler") - return sdk.Result{} - }) - } - - app := setupBaseApp(t, anteOpt, routerOpt) - - app.BeginBlock(abci.RequestBeginBlock{}) - - testCases := []struct { - tx *txTest - gasUsed int64 - fail bool - }{ - {newTxCounter(0, 0), 0, false}, - {newTxCounter(1, 1), 2, false}, - {newTxCounter(9, 1), 10, false}, - {newTxCounter(1, 9), 10, false}, - {newTxCounter(10, 0), 10, false}, - {newTxCounter(0, 10), 10, false}, - {newTxCounter(0, 8, 2), 10, false}, - {newTxCounter(0, 5, 1, 1, 1, 1, 1), 10, false}, - {newTxCounter(0, 5, 1, 1, 1, 1), 9, false}, - - {newTxCounter(9, 2), 11, true}, - {newTxCounter(2, 9), 11, true}, - {newTxCounter(9, 1, 1), 11, true}, - {newTxCounter(1, 8, 1, 1), 11, true}, - {newTxCounter(11, 0), 11, true}, - {newTxCounter(0, 11), 11, true}, - {newTxCounter(0, 5, 11), 16, true}, - } - - for i, tc := range testCases { - tx := tc.tx - res := app.Deliver(tx) - - // check gas used and wanted - require.Equal(t, tc.gasUsed, res.GasUsed, fmt.Sprintf("%d: %v, %v", i, tc, res)) - - // check for out of gas - if !tc.fail { - require.True(t, res.IsOK(), fmt.Sprintf("%d: %v, %v", i, tc, res)) - } else { - require.Equal(t, res.Code, sdk.ToABCICode(sdk.CodespaceRoot, sdk.CodeOutOfGas), fmt.Sprintf("%d: %v, %v", i, tc, res)) - } - } -} - 
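The NOTE in the removed gas-limit test above explains why the AnteHandler needs its own defer/recover: it creates the gas meter, so it must catch sdk.ErrorOutOfGas itself for GasWanted/GasUsed to be reported back to the BaseApp. A minimal sketch of that pattern, written against the old two-argument AnteHandler signature used by the removed test (not the new RunTxMode signature introduced in this change):

```go
package baseapp

import (
	"fmt"

	sdk "github.com/cosmos/cosmos-sdk/types"
)

// gasLimitedAnteHandler sketches the recover pattern described in the NOTE
// above: because the handler installs the gas meter, it also recovers from
// sdk.ErrorOutOfGas so gas accounting survives the panic.
func gasLimitedAnteHandler(gasGranted int64) func(sdk.Context, sdk.Tx) (sdk.Context, sdk.Result, bool) {
	return func(ctx sdk.Context, tx sdk.Tx) (newCtx sdk.Context, res sdk.Result, abort bool) {
		newCtx = ctx.WithGasMeter(sdk.NewGasMeter(gasGranted))

		defer func() {
			if r := recover(); r != nil {
				rType, ok := r.(sdk.ErrorOutOfGas)
				if !ok {
					panic(r) // unrelated panic: re-raise
				}
				log := fmt.Sprintf("out of gas in location: %v", rType.Descriptor)
				res = sdk.ErrOutOfGas(log).Result()
				res.GasWanted = gasGranted
				res.GasUsed = newCtx.GasMeter().GasConsumed()
				abort = true
			}
		}()

		res = sdk.Result{GasWanted: gasGranted}
		return
	}
}
```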
-//------------------------------------------------------------------------------------------- -// Queries - -// Test that we can only query from the latest committed state. -func TestQuery(t *testing.T) { - key, value := []byte("hello"), []byte("goodbye") - anteOpt := func(bapp *BaseApp) { - bapp.SetAnteHandler(func(ctx sdk.Context, tx sdk.Tx) (newCtx sdk.Context, res sdk.Result, abort bool) { - store := ctx.KVStore(capKey1) - store.Set(key, value) - return - }) - } - - routerOpt := func(bapp *BaseApp) { - bapp.Router().AddRoute(typeMsgCounter, func(ctx sdk.Context, msg sdk.Msg) sdk.Result { - store := ctx.KVStore(capKey1) - store.Set(key, value) - return sdk.Result{} - }) - } - - app := setupBaseApp(t, anteOpt, routerOpt) - - app.InitChain(abci.RequestInitChain{}) - - // NOTE: "/store/key1" tells us KVStore - // and the final "/key" says to use the data as the - // key in the given KVStore ... - query := abci.RequestQuery{ - Path: "/store/key1/key", - Data: key, - } - tx := newTxCounter(0, 0) - - // query is empty before we do anything - res := app.Query(query) - require.Equal(t, 0, len(res.Value)) - - // query is still empty after a CheckTx - resTx := app.Check(tx) - require.True(t, resTx.IsOK(), fmt.Sprintf("%v", resTx)) - res = app.Query(query) - require.Equal(t, 0, len(res.Value)) - - // query is still empty after a DeliverTx before we commit - app.BeginBlock(abci.RequestBeginBlock{}) - resTx = app.Deliver(tx) - require.True(t, resTx.IsOK(), fmt.Sprintf("%v", resTx)) - res = app.Query(query) - require.Equal(t, 0, len(res.Value)) - - // query returns correct value after Commit - app.Commit() - res = app.Query(query) - require.Equal(t, value, res.Value) -} - -// Test p2p filter queries -func TestP2PQuery(t *testing.T) { - addrPeerFilterOpt := func(bapp *BaseApp) { - bapp.SetAddrPeerFilter(func(addrport string) abci.ResponseQuery { - require.Equal(t, "1.1.1.1:8000", addrport) - return abci.ResponseQuery{Code: uint32(3)} - }) - } - - pubkeyPeerFilterOpt := func(bapp *BaseApp) { - bapp.SetPubKeyPeerFilter(func(pubkey string) abci.ResponseQuery { - require.Equal(t, "testpubkey", pubkey) - return abci.ResponseQuery{Code: uint32(4)} - }) - } - - app := setupBaseApp(t, addrPeerFilterOpt, pubkeyPeerFilterOpt) - - addrQuery := abci.RequestQuery{ - Path: "/p2p/filter/addr/1.1.1.1:8000", - } - res := app.Query(addrQuery) - require.Equal(t, uint32(3), res.Code) - - pubkeyQuery := abci.RequestQuery{ - Path: "/p2p/filter/pubkey/testpubkey", - } - res = app.Query(pubkeyQuery) - require.Equal(t, uint32(4), res.Code) -} diff --git a/baseapp/helpers.go b/baseapp/helpers.go index f3f1448bc..f40db0de9 100644 --- a/baseapp/helpers.go +++ b/baseapp/helpers.go @@ -4,22 +4,26 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" "github.com/tendermint/tendermint/abci/server" abci "github.com/tendermint/tendermint/abci/types" + "github.com/tendermint/tendermint/crypto/tmhash" cmn "github.com/tendermint/tendermint/libs/common" ) // nolint - Mostly for testing func (app *BaseApp) Check(tx sdk.Tx) (result sdk.Result) { - return app.runTx(runTxModeCheck, nil, tx) + txHash := cmn.HexBytes(tmhash.Sum(nil)).String() + return app.RunTx(sdk.RunTxModeCheck, nil, tx, txHash) } // nolint - full tx execution func (app *BaseApp) Simulate(tx sdk.Tx) (result sdk.Result) { - return app.runTx(runTxModeSimulate, nil, tx) + txHash := cmn.HexBytes(tmhash.Sum(nil)).String() + return app.RunTx(sdk.RunTxModeSimulate, nil, tx, txHash) } // nolint func (app *BaseApp) Deliver(tx sdk.Tx) (result sdk.Result) { - return 
app.runTx(runTxModeDeliver, nil, tx) + txHash := cmn.HexBytes(tmhash.Sum(nil)).String() + return app.RunTx(sdk.RunTxModeDeliver, nil, tx, txHash) } // RunForever - BasecoinApp execution and cleanup diff --git a/baseapp/options.go b/baseapp/options.go index 0a404217a..11fa411fa 100644 --- a/baseapp/options.go +++ b/baseapp/options.go @@ -1,9 +1,12 @@ +// nolint: golint package baseapp import ( "fmt" + "github.com/cosmos/cosmos-sdk/store" sdk "github.com/cosmos/cosmos-sdk/types" + dbm "github.com/tendermint/tendermint/libs/db" ) // File for storing in-package BaseApp optional functions, @@ -20,9 +23,98 @@ func SetPruning(pruning string) func(*BaseApp) { case "syncable": pruningEnum = sdk.PruneSyncable default: - panic(fmt.Sprintf("Invalid pruning strategy: %s", pruning)) + panic(fmt.Sprintf("invalid pruning strategy: %s", pruning)) } return func(bap *BaseApp) { bap.cms.SetPruning(pruningEnum) } } + +func (app *BaseApp) SetName(name string) { + if app.sealed { + panic("SetName() on sealed BaseApp") + } + app.name = name +} + +func (app *BaseApp) SetDB(db dbm.DB) { + if app.sealed { + panic("SetDB() on sealed BaseApp") + } + app.db = db +} + +func (app *BaseApp) SetCMS(cms store.CommitMultiStore) { + if app.sealed { + panic("SetEndBlocker() on sealed BaseApp") + } + app.cms = cms +} + +func (app *BaseApp) SetInitChainer(initChainer sdk.InitChainer) { + if app.sealed { + panic("SetInitChainer() on sealed BaseApp") + } + app.initChainer = initChainer +} + +func (app *BaseApp) SetBeginBlocker(beginBlocker sdk.BeginBlocker) { + if app.sealed { + panic("SetBeginBlocker() on sealed BaseApp") + } + app.beginBlocker = beginBlocker +} + +func (app *BaseApp) SetEndBlocker(endBlocker sdk.EndBlocker) { + if app.sealed { + panic("SetEndBlocker() on sealed BaseApp") + } + app.endBlocker = endBlocker +} + +func (app *BaseApp) SetAnteHandler(ah sdk.AnteHandler) { + if app.sealed { + panic("SetAnteHandler() on sealed BaseApp") + } + app.anteHandler = ah +} + +func (app *BaseApp) SetPreChecker(pc sdk.PreChecker) { + if app.sealed { + panic("SetPreChecker() on sealed BaseApp") + } + app.preChecker = pc +} + +func (app *BaseApp) SetAddrPeerFilter(pf sdk.PeerFilter) { + if app.sealed { + panic("SetAddrPeerFilter() on sealed BaseApp") + } + app.addrPeerFilter = pf +} + +func (app *BaseApp) SetPubKeyPeerFilter(pf sdk.PeerFilter) { + if app.sealed { + panic("SetPubKeyPeerFilter() on sealed BaseApp") + } + app.pubkeyPeerFilter = pf +} + +func (app *BaseApp) Router() Router { + if app.sealed { + panic("Router() on sealed BaseApp") + } + return app.router +} + +func (app *BaseApp) QueryRouter() QueryRouter { + return app.queryRouter +} + +func (app *BaseApp) Seal() { app.sealed = true } +func (app *BaseApp) IsSealed() bool { return app.sealed } +func (app *BaseApp) enforceSeal() { + if !app.sealed { + panic("enforceSeal() on BaseApp but not sealed") + } +} diff --git a/baseapp/query_test.go b/baseapp/query_test.go new file mode 100644 index 000000000..e34c374d2 --- /dev/null +++ b/baseapp/query_test.go @@ -0,0 +1,96 @@ +package baseapp + +import ( + "fmt" + "testing" + + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/require" + abci "github.com/tendermint/tendermint/abci/types" +) + +// Test that we can only query from the latest committed state. 
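The setters gathered into baseapp/options.go above all guard on app.sealed, so configuration must happen before Seal() and is frozen afterwards. A small life-cycle sketch of that pattern; the function, route, and handler names are illustrative only:

```go
package baseapp

import (
	sdk "github.com/cosmos/cosmos-sdk/types"
)

// configureAndSeal illustrates the intended order of the option setters above:
// wire everything up first, then Seal() to freeze the configuration.
func configureAndSeal(app *BaseApp, ante sdk.AnteHandler, pre sdk.PreChecker) {
	app.SetName("demoapp")   // allowed: app is still unsealed
	app.SetAnteHandler(ante) // allowed
	app.SetPreChecker(pre)   // allowed
	app.Router().AddRoute("demo", func(ctx sdk.Context, msg sdk.Msg) sdk.Result {
		return sdk.Result{}
	})

	app.Seal()

	// From here on, any SetXxx call (or Router()) panics with
	// "...on sealed BaseApp", so runtime code can only read the app.
	_ = app.IsSealed() // true
}
```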
+func TestQuery(t *testing.T) { + key, value := []byte("hello"), []byte("goodbye") + anteOpt := func(bapp *BaseApp) { + bapp.SetAnteHandler(func(ctx sdk.Context, tx sdk.Tx, mode sdk.RunTxMode) (newCtx sdk.Context, res sdk.Result, abort bool) { + store := ctx.KVStore(capKey1) + store.Set(key, value) + return + }) + } + + routerOpt := func(bapp *BaseApp) { + bapp.Router().AddRoute(routeMsgCounter, func(ctx sdk.Context, msg sdk.Msg) sdk.Result { + store := ctx.KVStore(capKey1) + store.Set(key, value) + return sdk.Result{} + }) + } + + app := setupBaseApp(t, anteOpt, routerOpt) + + app.InitChain(abci.RequestInitChain{}) + + // NOTE: "/store/key1" tells us KVStore + // and the final "/key" says to use the data as the + // key in the given KVStore ... + query := abci.RequestQuery{ + Path: "/store/key1/key", + Data: key, + } + tx := newTxCounter(0, 0) + + // query is empty before we do anything + res := app.Query(query) + require.Equal(t, 0, len(res.Value)) + + // query is still empty after a CheckTx + resTx := app.Check(tx) + require.True(t, resTx.IsOK(), fmt.Sprintf("%v", resTx)) + res = app.Query(query) + require.Equal(t, 0, len(res.Value)) + + // query is still empty after a DeliverTx before we commit + app.BeginBlock(abci.RequestBeginBlock{}) + resTx = app.Deliver(tx) + require.True(t, resTx.IsOK(), fmt.Sprintf("%v", resTx)) + res = app.Query(query) + require.Equal(t, 0, len(res.Value)) + + // query returns correct value after Commit + app.Commit() + res = app.Query(query) + require.Equal(t, value, res.Value) +} + +// Test p2p filter queries +func TestP2PQuery(t *testing.T) { + addrPeerFilterOpt := func(bapp *BaseApp) { + bapp.SetAddrPeerFilter(func(addrport string) abci.ResponseQuery { + require.Equal(t, "1.1.1.1:8000", addrport) + return abci.ResponseQuery{Code: uint32(3)} + }) + } + + pubkeyPeerFilterOpt := func(bapp *BaseApp) { + bapp.SetPubKeyPeerFilter(func(pubkey string) abci.ResponseQuery { + require.Equal(t, "testpubkey", pubkey) + return abci.ResponseQuery{Code: uint32(4)} + }) + } + + app := setupBaseApp(t, addrPeerFilterOpt, pubkeyPeerFilterOpt) + + addrQuery := abci.RequestQuery{ + Path: "/p2p/filter/addr/1.1.1.1:8000", + } + res := app.Query(addrQuery) + require.Equal(t, uint32(3), res.Code) + + pubkeyQuery := abci.RequestQuery{ + Path: "/p2p/filter/pubkey/testpubkey", + } + res = app.Query(pubkeyQuery) + require.Equal(t, uint32(4), res.Code) +} diff --git a/baseapp/queryrouter.go b/baseapp/queryrouter.go new file mode 100644 index 000000000..23cfad072 --- /dev/null +++ b/baseapp/queryrouter.go @@ -0,0 +1,41 @@ +package baseapp + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" +) + +// QueryRouter provides queryables for each query path. +type QueryRouter interface { + AddRoute(r string, h sdk.Querier) (rtr QueryRouter) + Route(path string) (h sdk.Querier) +} + +type queryrouter struct { + routes map[string]sdk.Querier +} + +// nolint +// NewRouter - create new router +// TODO either make Function unexported or make return type (router) Exported +func NewQueryRouter() *queryrouter { + return &queryrouter{ + routes: map[string]sdk.Querier{}, + } +} + +// AddRoute - Adds an sdk.Querier to the route provided. 
Panics on duplicate +func (rtr *queryrouter) AddRoute(r string, q sdk.Querier) QueryRouter { + if !isAlphaNumeric(r) { + panic("route expressions can only contain alphanumeric characters") + } + if rtr.routes[r] != nil { + panic("route has already been initialized") + } + rtr.routes[r] = q + return rtr +} + +// Returns the sdk.Querier for a certain route path +func (rtr *queryrouter) Route(path string) (h sdk.Querier) { + return rtr.routes[path] +} diff --git a/baseapp/router.go b/baseapp/router.go index 4be3aec74..af060daf5 100644 --- a/baseapp/router.go +++ b/baseapp/router.go @@ -31,6 +31,8 @@ func NewRouter() *router { } } +var isAlpha = regexp.MustCompile(`^[a-zA-Z]+$`).MatchString + var isAlphaNumeric = regexp.MustCompile(`^[a-zA-Z0-9]+$`).MatchString // AddRoute - TODO add description @@ -43,6 +45,16 @@ func (rtr *router) AddRoute(r string, h sdk.Handler) Router { return rtr } +// AddRoute adds a msg route to the router. +func (rtr *router) AddRoute_MightNeed(r string, h sdk.Handler) Router { + if !isAlpha(r) { + panic("route expressions can only contain alphabet characters") + } + rtr.routes = append(rtr.routes, route{r, h}) + + return rtr +} + // Route - TODO add description // TODO handle expressive matches. func (rtr *router) Route(path string) (h sdk.Handler) { diff --git a/baseapp/setters.go b/baseapp/setters.go deleted file mode 100644 index 86a647d32..000000000 --- a/baseapp/setters.go +++ /dev/null @@ -1,83 +0,0 @@ -package baseapp - -import ( - dbm "github.com/tendermint/tendermint/libs/db" - - "github.com/cosmos/cosmos-sdk/store" - sdk "github.com/cosmos/cosmos-sdk/types" -) - -// nolint - Setter functions -func (app *BaseApp) SetName(name string) { - if app.sealed { - panic("SetName() on sealed BaseApp") - } - app.name = name -} -func (app *BaseApp) SetDB(db dbm.DB) { - if app.sealed { - panic("SetDB() on sealed BaseApp") - } - app.db = db -} -func (app *BaseApp) SetCMS(cms store.CommitMultiStore) { - if app.sealed { - panic("SetEndBlocker() on sealed BaseApp") - } - app.cms = cms -} -func (app *BaseApp) SetTxDecoder(txDecoder sdk.TxDecoder) { - if app.sealed { - panic("SetTxDecoder() on sealed BaseApp") - } - app.txDecoder = txDecoder -} -func (app *BaseApp) SetInitChainer(initChainer sdk.InitChainer) { - if app.sealed { - panic("SetInitChainer() on sealed BaseApp") - } - app.initChainer = initChainer -} -func (app *BaseApp) SetBeginBlocker(beginBlocker sdk.BeginBlocker) { - if app.sealed { - panic("SetBeginBlocker() on sealed BaseApp") - } - app.beginBlocker = beginBlocker -} -func (app *BaseApp) SetEndBlocker(endBlocker sdk.EndBlocker) { - if app.sealed { - panic("SetEndBlocker() on sealed BaseApp") - } - app.endBlocker = endBlocker -} -func (app *BaseApp) SetAnteHandler(ah sdk.AnteHandler) { - if app.sealed { - panic("SetAnteHandler() on sealed BaseApp") - } - app.anteHandler = ah -} -func (app *BaseApp) SetAddrPeerFilter(pf sdk.PeerFilter) { - if app.sealed { - panic("SetAddrPeerFilter() on sealed BaseApp") - } - app.addrPeerFilter = pf -} -func (app *BaseApp) SetPubKeyPeerFilter(pf sdk.PeerFilter) { - if app.sealed { - panic("SetPubKeyPeerFilter() on sealed BaseApp") - } - app.pubkeyPeerFilter = pf -} -func (app *BaseApp) Router() Router { - if app.sealed { - panic("Router() on sealed BaseApp") - } - return app.router -} -func (app *BaseApp) Seal() { app.sealed = true } -func (app *BaseApp) IsSealed() bool { return app.sealed } -func (app *BaseApp) enforceSeal() { - if !app.sealed { - panic("enforceSeal() on BaseApp but not sealed") - } -} diff --git 
a/client/config.go b/client/config.go new file mode 100644 index 000000000..a1d38a016 --- /dev/null +++ b/client/config.go @@ -0,0 +1,131 @@ +package client + +import ( + "bufio" + "fmt" + "github.com/cosmos/cosmos-sdk/types" + "github.com/mitchellh/go-homedir" + "github.com/pelletier/go-toml" + "github.com/spf13/cobra" + "io/ioutil" + "os" + "path" +) + +type cliConfig struct { + Home string `toml:"home"` + ChainID string `toml:"chain_id"` + TrustNode bool `toml:"trust_node"` + Encoding string `toml:"encoding"` + Output string `toml:"output"` + Node string `toml:"node"` + Trace bool `toml:"trace"` +} + +// ConfigCmd returns a CLI command to interactively create a +// Gaia CLI config file. +func ConfigCmd() *cobra.Command { + cfg := &cobra.Command{ + Use: "config", + Short: "Interactively creates a Gaia CLI config file", + RunE: runConfigCmd, + } + + return cfg +} + +func runConfigCmd(cmd *cobra.Command, args []string) error { + home, err := homedir.Dir() + if err != nil { + return err + } + + stdin := BufferStdin() + gaiaCLIHome, err := handleGaiaCLIHome(home, stdin) + if err != nil { + return err + } + node, err := handleNode(stdin) + if err != nil { + return err + } + trustNode, err := handleTrustNode(stdin) + if err != nil { + return err + } + + encoding := "btc" + output := "text" + var chainID string + chainID, err = types.DefaultChainID() + if err != nil { + fmt.Println("Couldn't populate ChainID, so using an empty one.") + } + + cfg := &cliConfig{ + Home: gaiaCLIHome, + ChainID: chainID, + TrustNode: trustNode, + Encoding: encoding, + Output: output, + Node: node, + Trace: false, + } + + return createGaiaCLIConfig(cfg) +} + +func handleGaiaCLIHome(dir string, stdin *bufio.Reader) (string, error) { + dirName := ".gaiacli" + home, err := GetString(fmt.Sprintf("Where is your gaiacli home directory? (Default: ~/%s)", dirName), stdin) + if err != nil { + return "", err + } + + if home == "" { + home = path.Join(dir, dirName) + } + + return home, nil +} + +func handleNode(stdin *bufio.Reader) (string, error) { + defaultNode := "tcp://localhost:26657" + node, err := GetString(fmt.Sprintf("Where is your validator node running? (Default: %s)", defaultNode), stdin) + if err != nil { + return "", err + } + + if node == "" { + node = defaultNode + } + + return node, nil +} + +func handleTrustNode(stdin *bufio.Reader) (bool, error) { + return GetConfirmation("Do you trust this node?", stdin) +} + +func createGaiaCLIConfig(cfg *cliConfig) error { + cfgPath := path.Join(cfg.Home, "config") + err := os.MkdirAll(cfgPath, os.ModePerm) + if err != nil { + return err + } + + data, err := toml.Marshal(*cfg) + if err != nil { + return err + } + + cfgFile := path.Join(cfgPath, "config.toml") + if info, err := os.Stat(cfgFile); err == nil && !info.IsDir() { + err = os.Rename(cfgFile, path.Join(cfgPath, "config.toml-old")) + if err != nil { + return err + } + } + + return ioutil.WriteFile(cfgFile, data, os.ModePerm) +} diff --git a/client/context/broadcast.go b/client/context/broadcast.go new file mode 100644 index 000000000..9f88ce7b9 --- /dev/null +++ b/client/context/broadcast.go @@ -0,0 +1,170 @@ +package context + +import ( + "fmt" + "io" + + "github.com/pkg/errors" + + abci "github.com/tendermint/tendermint/abci/types" + ctypes "github.com/tendermint/tendermint/rpc/core/types" +) + +// TODO: This should get deleted eventually, and perhaps +// ctypes.ResultBroadcastTx be stripped of unused fields, and +// ctypes.ResultBroadcastTxCommit returned for tendermint RPC BroadcastTxSync. 
+// +// The motivation is that we want a unified type to return, and the better +// option is the one that can hold CheckTx/DeliverTx responses optionally. +func resultBroadcastTxToCommit(res *ctypes.ResultBroadcastTx) *ctypes.ResultBroadcastTxCommit { + return &ctypes.ResultBroadcastTxCommit{ + Hash: res.Hash, + // NOTE: other fields are unused for async. + } +} + +// BroadcastTx broadcasts a transactions either synchronously or asynchronously +// based on the context parameters. The result of the broadcast is parsed into +// an intermediate structure which is logged if the context has a logger +// defined. +func (ctx CLIContext) BroadcastTx(txBytes []byte) (*ctypes.ResultBroadcastTxCommit, error) { + if ctx.Async { + res, err := ctx.broadcastTxAsync(txBytes) + if err != nil { + return nil, err + } + + resCommit := resultBroadcastTxToCommit(res) + return resCommit, err + } + + return ctx.broadcastTxCommit(txBytes) +} + +// BroadcastTxAndAwaitCommit broadcasts transaction bytes to a Tendermint node +// and waits for a commit. +func (ctx CLIContext) BroadcastTxAndAwaitCommit(tx []byte) (*ctypes.ResultBroadcastTxCommit, error) { + node, err := ctx.GetNode() + if err != nil { + return nil, err + } + + res, err := node.BroadcastTxCommit(tx) + if err != nil { + return res, err + } + + if !res.CheckTx.IsOK() { + return res, errors.Errorf(res.CheckTx.Log) + } + + if !res.DeliverTx.IsOK() { + return res, errors.Errorf(res.DeliverTx.Log) + } + + return res, err +} + +// BroadcastTxSync broadcasts transaction bytes to a Tendermint node +// synchronously. +func (ctx CLIContext) BroadcastTxSync(tx []byte) (*ctypes.ResultBroadcastTx, error) { + node, err := ctx.GetNode() + if err != nil { + return nil, err + } + + res, err := node.BroadcastTxSync(tx) + if err != nil { + return res, err + } + + return res, err +} + +// BroadcastTxAsync broadcasts transaction bytes to a Tendermint node +// asynchronously. +func (ctx CLIContext) BroadcastTxAsync(tx []byte) (*ctypes.ResultBroadcastTx, error) { + node, err := ctx.GetNode() + if err != nil { + return nil, err + } + + res, err := node.BroadcastTxAsync(tx) + if err != nil { + return res, err + } + + return res, err +} + +func (ctx CLIContext) broadcastTxAsync(txBytes []byte) (*ctypes.ResultBroadcastTx, error) { + res, err := ctx.BroadcastTxAsync(txBytes) + if err != nil { + return res, err + } + + if ctx.Output != nil { + if ctx.JSON { + type toJSON struct { + TxHash string + } + + resJSON := toJSON{res.Hash.String()} + bz, err := ctx.Codec.MarshalJSON(resJSON) + if err != nil { + return res, err + } + + ctx.Output.Write(bz) + io.WriteString(ctx.Output, "\n") + } else { + io.WriteString(ctx.Output, fmt.Sprintf("async tx sent (tx hash: %s)\n", res.Hash)) + } + } + + return res, nil +} + +func (ctx CLIContext) broadcastTxCommit(txBytes []byte) (*ctypes.ResultBroadcastTxCommit, error) { + res, err := ctx.BroadcastTxAndAwaitCommit(txBytes) + if err != nil { + return res, err + } + + if ctx.JSON { + // Since JSON is intended for automated scripts, always include response in + // JSON mode. 
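The CLIContext broadcast helpers above switch between a fire-and-forget async broadcast and a commit-awaiting one based on ctx.Async. A hedged usage sketch; only BroadcastTx and the result fields shown above are assumed, and the wrapper name is made up:

```go
package main

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/client/context"
)

// broadcastAndReport shows how a caller might use ctx.BroadcastTx: with
// ctx.Async set, only the tx hash is populated (see resultBroadcastTxToCommit);
// otherwise the call blocks until both CheckTx and DeliverTx succeed.
func broadcastAndReport(ctx context.CLIContext, txBytes []byte) error {
	res, err := ctx.BroadcastTx(txBytes)
	if err != nil {
		return err
	}

	if ctx.Async {
		fmt.Printf("async tx sent (tx hash: %s)\n", res.Hash)
		return nil
	}

	fmt.Printf("committed at block %d (tx hash: %s)\n", res.Height, res.Hash)
	return nil
}
```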
+ type toJSON struct { + Height int64 + TxHash string + Response abci.ResponseDeliverTx + } + + if ctx.Output != nil { + resJSON := toJSON{res.Height, res.Hash.String(), res.DeliverTx} + bz, err := ctx.Codec.MarshalJSON(resJSON) + if err != nil { + return res, err + } + + ctx.Output.Write(bz) + io.WriteString(ctx.Output, "\n") + } + + return res, nil + } + + if ctx.Output != nil { + resStr := fmt.Sprintf("Committed at block %d (tx hash: %s)\n", res.Height, res.Hash.String()) + + if ctx.PrintResponse { + resStr = fmt.Sprintf("Committed at block %d (tx hash: %s, response: %+v)\n", + res.Height, res.Hash.String(), res.DeliverTx, + ) + } + + io.WriteString(ctx.Output, resStr) + } + + return res, nil +} diff --git a/client/context/context.go b/client/context/context.go index 1b0443b0c..b11ad8298 100644 --- a/client/context/context.go +++ b/client/context/context.go @@ -1,35 +1,56 @@ package context import ( + "bytes" + "fmt" "io" + "os" + "path/filepath" "github.com/cosmos/cosmos-sdk/client" - "github.com/cosmos/cosmos-sdk/wire" + "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/x/auth" "github.com/spf13/viper" + "github.com/cosmos/cosmos-sdk/client/keys" + cskeys "github.com/cosmos/cosmos-sdk/crypto/keys" + "github.com/cosmos/cosmos-sdk/types" + "github.com/tendermint/tendermint/libs/cli" + "github.com/tendermint/tendermint/libs/log" + tmlite "github.com/tendermint/tendermint/lite" + tmliteProxy "github.com/tendermint/tendermint/lite/proxy" rpcclient "github.com/tendermint/tendermint/rpc/client" ) const ctxAccStoreName = "acc" +var ( + verifier tmlite.Verifier +) + // CLIContext implements a typical CLI context created in SDK modules for // transaction handling and queries. type CLIContext struct { - Codec *wire.Codec - AccDecoder auth.AccountDecoder - Client rpcclient.Client - Logger io.Writer - Height int64 - NodeURI string - FromAddressName string - AccountStore string - TrustNode bool - UseLedger bool - Async bool - JSON bool - PrintResponse bool + Codec *codec.Codec + AccDecoder auth.AccountDecoder + Client rpcclient.Client + Output io.Writer + Height int64 + NodeURI string + From string + AccountStore string + TrustNode bool + UseLedger bool + Async bool + JSON bool + PrintResponse bool + Verifier tmlite.Verifier + DryRun bool + GenerateOnly bool + fromAddress types.AccAddress + fromName string + Indent bool } // NewCLIContext returns a new initialized CLIContext with parameters from the @@ -42,22 +63,113 @@ func NewCLIContext() CLIContext { rpc = rpcclient.NewHTTP(nodeURI, "/websocket") } + from := viper.GetString(client.FlagFrom) + fromAddress, fromName := fromFields(from) + + // We need to use a single verifier for all contexts + if verifier == nil { + verifier = createVerifier() + } + return CLIContext{ - Client: rpc, - NodeURI: nodeURI, - AccountStore: ctxAccStoreName, - FromAddressName: viper.GetString(client.FlagFrom), - Height: viper.GetInt64(client.FlagHeight), - TrustNode: viper.GetBool(client.FlagTrustNode), - UseLedger: viper.GetBool(client.FlagUseLedger), - Async: viper.GetBool(client.FlagAsync), - JSON: viper.GetBool(client.FlagJson), - PrintResponse: viper.GetBool(client.FlagPrintResponse), + Client: rpc, + Output: os.Stdout, + NodeURI: nodeURI, + AccountStore: ctxAccStoreName, + From: viper.GetString(client.FlagFrom), + Height: viper.GetInt64(client.FlagHeight), + TrustNode: viper.GetBool(client.FlagTrustNode), + UseLedger: viper.GetBool(client.FlagUseLedger), + Async: viper.GetBool(client.FlagAsync), + JSON: viper.GetBool(client.FlagJson), + 
PrintResponse: viper.GetBool(client.FlagPrintResponse), + Verifier: verifier, + DryRun: viper.GetBool(client.FlagDryRun), + GenerateOnly: viper.GetBool(client.FlagGenerateOnly), + fromAddress: fromAddress, + fromName: fromName, + Indent: viper.GetBool(client.FlagIndentResponse), + } +} + +func createVerifier() tmlite.Verifier { + trustNodeDefined := viper.IsSet(client.FlagTrustNode) + if !trustNodeDefined { + return nil + } + + trustNode := viper.GetBool(client.FlagTrustNode) + if trustNode { + return nil + } + + chainID := viper.GetString(client.FlagChainID) + home := viper.GetString(cli.HomeFlag) + nodeURI := viper.GetString(client.FlagNode) + + var errMsg bytes.Buffer + if chainID == "" { + errMsg.WriteString("--chain-id ") + } + if home == "" { + errMsg.WriteString("--home ") } + if nodeURI == "" { + errMsg.WriteString("--node ") + } + if errMsg.Len() != 0 { + fmt.Printf("Must specify these options: %s when --trust-node is false\n", errMsg.String()) + os.Exit(1) + } + node := rpcclient.NewHTTP(nodeURI, "/websocket") + cacheSize := 10 // TODO: determine appropriate cache size + verifier, err := tmliteProxy.NewVerifier( + chainID, filepath.Join(home, ".gaialite"), + node, log.NewNopLogger(), cacheSize, + ) + + if err != nil { + fmt.Printf("Create verifier failed: %s\n", err.Error()) + fmt.Printf("Please check network connection and verify the address of the node to connect to\n") + os.Exit(1) + } + + return verifier +} + +func fromFields(from string) (fromAddr types.AccAddress, fromName string) { + if from == "" { + return nil, "" + } + + keybase, err := keys.GetKeyBase() + if err != nil { + fmt.Println("no keybase found") + os.Exit(1) + } + + var info cskeys.Info + if addr, err := types.AccAddressFromBech32(from); err == nil { + info, err = keybase.GetByAddress(addr) + if err != nil { + fmt.Printf("could not find key %s\n", from) + os.Exit(1) + } + } else { + info, err = keybase.Get(from) + if err != nil { + fmt.Printf("could not find key %s\n", from) + os.Exit(1) + } + } + + fromAddr = info.GetAddress() + fromName = info.GetName() + return } // WithCodec returns a copy of the context with an updated codec. -func (ctx CLIContext) WithCodec(cdc *wire.Codec) CLIContext { +func (ctx CLIContext) WithCodec(cdc *codec.Codec) CLIContext { ctx.Codec = cdc return ctx } @@ -69,9 +181,9 @@ func (ctx CLIContext) WithAccountDecoder(decoder auth.AccountDecoder) CLIContext return ctx } -// WithLogger returns a copy of the context with an updated logger. -func (ctx CLIContext) WithLogger(w io.Writer) CLIContext { - ctx.Logger = w +// WithOutput returns a copy of the context with an updated output writer (e.g. stdout). +func (ctx CLIContext) WithOutput(w io.Writer) CLIContext { + ctx.Output = w return ctx } @@ -81,10 +193,9 @@ func (ctx CLIContext) WithAccountStore(accountStore string) CLIContext { return ctx } -// WithFromAddressName returns a copy of the context with an updated from -// address. -func (ctx CLIContext) WithFromAddressName(addrName string) CLIContext { - ctx.FromAddressName = addrName +// WithFrom returns a copy of the context with an updated from address or name. 
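NewCLIContext above reads everything from viper-bound flags, while the With* helpers (including WithFrom, defined just below) let library code build a context explicitly. A short sketch of that builder-style construction, assuming a codec and account decoder are already in hand; the helper name is hypothetical:

```go
package main

import (
	"os"

	"github.com/cosmos/cosmos-sdk/client/context"
	"github.com/cosmos/cosmos-sdk/codec"
	"github.com/cosmos/cosmos-sdk/x/auth"
)

// newQueryContext sketches explicit CLIContext construction via the With*
// helpers shown above (WithCodec, WithAccountDecoder, WithOutput, WithFrom).
func newQueryContext(cdc *codec.Codec, accDec auth.AccountDecoder, from string) context.CLIContext {
	return context.NewCLIContext().
		WithCodec(cdc).
		WithAccountDecoder(accDec).
		WithOutput(os.Stdout).
		WithFrom(from)
}
```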
+func (ctx CLIContext) WithFrom(from string) CLIContext { + ctx.From = from return ctx } @@ -113,3 +224,9 @@ func (ctx CLIContext) WithUseLedger(useLedger bool) CLIContext { ctx.UseLedger = useLedger return ctx } + +// WithVerifier - return a copy of the context with an updated Verifier +func (ctx CLIContext) WithVerifier(verifier tmlite.Verifier) CLIContext { + ctx.Verifier = verifier + return ctx +} diff --git a/client/context/errors.go b/client/context/errors.go index 9c611494a..de96aaa18 100644 --- a/client/context/errors.go +++ b/client/context/errors.go @@ -11,3 +11,11 @@ func ErrInvalidAccount(addr sdk.AccAddress) error { return errors.Errorf(`No account with address %s was found in the state. Are you sure there has been a transaction involving it?`, addr) } + +// ErrVerifyCommit returns a common error reflecting that the blockchain commit at a given +// height can't be verified. The reason is that the base checkpoint of the certifier is +// newer than the given height +func ErrVerifyCommit(height int64) error { + return errors.Errorf(`The height of base truststore in gaia-lite is higher than height %d. +Can't verify blockchain proof at this height. Please set --trust-node to true and try again`, height) +} diff --git a/client/context/query.go b/client/context/query.go index 081f723b5..7e4c237f8 100644 --- a/client/context/query.go +++ b/client/context/query.go @@ -2,18 +2,20 @@ package context import ( "fmt" - "io" - - "github.com/cosmos/cosmos-sdk/client/keys" - sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/x/auth" + "strings" "github.com/pkg/errors" - - "github.com/tendermint/tendermint/libs/common" + abci "github.com/tendermint/tendermint/abci/types" + "github.com/tendermint/tendermint/crypto/merkle" cmn "github.com/tendermint/tendermint/libs/common" + tmliteErr "github.com/tendermint/tendermint/lite/errors" + tmliteProxy "github.com/tendermint/tendermint/lite/proxy" rpcclient "github.com/tendermint/tendermint/rpc/client" - ctypes "github.com/tendermint/tendermint/rpc/core/types" + tmtypes "github.com/tendermint/tendermint/types" + + "github.com/cosmos/cosmos-sdk/store" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/x/auth" ) // GetNode returns an RPC client. If the context's client is not defined, an @@ -27,8 +29,13 @@ func (ctx CLIContext) GetNode() (rpcclient.Client, error) { } // Query performs a query for information about the connected node. -func (ctx CLIContext) Query(path string) (res []byte, err error) { - return ctx.query(path, nil) +func (ctx CLIContext) Query(path string, data cmn.HexBytes) (res []byte, err error) { + return ctx.query(path, data) +} + +// Query information about the connected node with a data payload +func (ctx CLIContext) QueryWithData(path string, data []byte) (res []byte, err error) { + return ctx.query(path, data) } // QueryStore performs a query from a Tendermint node with the provided key and @@ -45,13 +52,13 @@ func (ctx CLIContext) QuerySubspace(subspace []byte, storeName string) (res []sd return res, err } - ctx.Codec.MustUnmarshalBinary(resRaw, &res) + ctx.Codec.MustUnmarshalBinaryLengthPrefixed(resRaw, &res) return } // GetAccount queries for an account given an address and a block height. An // error is returned if the query or decoding fails. 
-func (ctx CLIContext) GetAccount(address []byte) (auth.Account, error) { +func (ctx CLIContext) GetAccount(address []byte) (sdk.Account, error) { if ctx.AccDecoder == nil { return nil, errors.New("account decoder required but not provided") } @@ -72,22 +79,13 @@ func (ctx CLIContext) GetAccount(address []byte) (auth.Account, error) { } // GetFromAddress returns the from address from the context's name. -func (ctx CLIContext) GetFromAddress() (from sdk.AccAddress, err error) { - if ctx.FromAddressName == "" { - return nil, errors.Errorf("must provide a from address name") - } - - keybase, err := keys.GetKeyBase() - if err != nil { - return nil, err - } - - info, err := keybase.Get(ctx.FromAddressName) - if err != nil { - return nil, errors.Errorf("no key for: %s", ctx.FromAddressName) - } +func (ctx CLIContext) GetFromAddress() (sdk.AccAddress, error) { + return ctx.fromAddress, nil +} - return sdk.AccAddress(info.GetPubKey().Address()), nil +// GetFromName returns the key name for the current context. +func (ctx CLIContext) GetFromName() (string, error) { + return ctx.fromName, nil } // GetAccountNumber returns the next account number for the given account @@ -112,49 +110,6 @@ func (ctx CLIContext) GetAccountSequence(address []byte) (int64, error) { return account.GetSequence(), nil } -// BroadcastTx broadcasts transaction bytes to a Tendermint node. -func (ctx CLIContext) BroadcastTx(tx []byte) (*ctypes.ResultBroadcastTxCommit, error) { - node, err := ctx.GetNode() - if err != nil { - return nil, err - } - - res, err := node.BroadcastTxCommit(tx) - if err != nil { - return res, err - } - - if !res.CheckTx.IsOK() { - return res, errors.Errorf("checkTx failed: (%d) %s", - res.CheckTx.Code, - res.CheckTx.Log) - } - - if !res.DeliverTx.IsOK() { - return res, errors.Errorf("deliverTx failed: (%d) %s", - res.DeliverTx.Code, - res.DeliverTx.Log) - } - - return res, err -} - -// BroadcastTxAsync broadcasts transaction bytes to a Tendermint node -// asynchronously. -func (ctx CLIContext) BroadcastTxAsync(tx []byte) (*ctypes.ResultBroadcastTx, error) { - node, err := ctx.GetNode() - if err != nil { - return nil, err - } - - res, err := node.BroadcastTxAsync(tx) - if err != nil { - return res, err - } - - return res, err -} - // EnsureAccountExists ensures that an account exists for a given context. An // error is returned if it does not. func (ctx CLIContext) EnsureAccountExists() error { @@ -191,116 +146,86 @@ func (ctx CLIContext) EnsureAccountExistsFromAddr(addr sdk.AccAddress) error { return nil } -// EnsureBroadcastTx broadcasts a transactions either synchronously or -// asynchronously based on the context parameters. The result of the broadcast -// is parsed into an intermediate structure which is logged if the context has -// a logger defined. -func (ctx CLIContext) EnsureBroadcastTx(txBytes []byte) error { - if ctx.Async { - return ctx.ensureBroadcastTxAsync(txBytes) +// query performs a query from a Tendermint node with the provided store name +// and path. 
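The reworked query path below only verifies Merkle proofs when the node is untrusted and the path is a provable store query of the form /store/<storeName>/key. A sketch of those two conventions, written as if inside the same package so it can reuse the unexported isQueryStoreWithProof defined further down in this diff; both helper names are illustrative:

```go
package context

import "fmt"

// storeKeyQueryPath builds the "/store/<storeName>/key" path used by
// queryStore below; the trailing "key" segment selects key-based lookups.
func storeKeyQueryPath(storeName string) string {
	return fmt.Sprintf("/store/%s/%s", storeName, "key")
}

// proofRequired mirrors the decision made in query(): responses from a trusted
// node, or for paths that are not provable store queries, skip verification.
func (ctx CLIContext) proofRequired(path string) bool {
	return !ctx.TrustNode && isQueryStoreWithProof(path)
}
```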
+func (ctx CLIContext) query(path string, key cmn.HexBytes) (res []byte, err error) { + node, err := ctx.GetNode() + if err != nil { + return res, err } - return ctx.ensureBroadcastTx(txBytes) -} + opts := rpcclient.ABCIQueryOptions{ + Height: ctx.Height, + Prove: !ctx.TrustNode, + } -func (ctx CLIContext) ensureBroadcastTxAsync(txBytes []byte) error { - res, err := ctx.BroadcastTxAsync(txBytes) + result, err := node.ABCIQueryWithOptions(path, key, opts) if err != nil { - return err + return res, err } - if ctx.JSON { - type toJSON struct { - TxHash string - } - - if ctx.Logger != nil { - resJSON := toJSON{res.Hash.String()} - bz, err := ctx.Codec.MarshalJSON(resJSON) - if err != nil { - return err - } - - ctx.Logger.Write(bz) - io.WriteString(ctx.Logger, "\n") - } - } else { - if ctx.Logger != nil { - io.WriteString(ctx.Logger, fmt.Sprintf("Async tx sent (tx hash: %s)\n", res.Hash)) - } + resp := result.Response + if !resp.IsOK() { + return res, errors.Errorf(resp.Log) } - return nil -} - -func (ctx CLIContext) ensureBroadcastTx(txBytes []byte) error { - res, err := ctx.BroadcastTx(txBytes) - if err != nil { - return err + // data from trusted node or subspace query doesn't need verification + if ctx.TrustNode || !isQueryStoreWithProof(path) { + return resp.Value, nil } - if ctx.JSON { - // since JSON is intended for automated scripts, always include - // response in JSON mode. - type toJSON struct { - Height int64 - TxHash string - Response string - } - - if ctx.Logger != nil { - resJSON := toJSON{res.Height, res.Hash.String(), fmt.Sprintf("%+v", res.DeliverTx)} - bz, err := ctx.Codec.MarshalJSON(resJSON) - if err != nil { - return err - } - - ctx.Logger.Write(bz) - io.WriteString(ctx.Logger, "\n") - } - - return nil + err = ctx.verifyProof(path, resp) + if err != nil { + return nil, err } - if ctx.Logger != nil { - resStr := fmt.Sprintf("Committed at block %d (tx hash: %s)\n", res.Height, res.Hash.String()) - - if ctx.PrintResponse { - resStr = fmt.Sprintf("Committed at block %d (tx hash: %s, response: %+v)\n", - res.Height, res.Hash.String(), res.DeliverTx, - ) - } + return resp.Value, nil +} - io.WriteString(ctx.Logger, resStr) +// Verify verifies the consensus proof at given height. +func (ctx CLIContext) Verify(height int64) (tmtypes.SignedHeader, error) { + check, err := tmliteProxy.GetCertifiedCommit(height, ctx.Client, ctx.Verifier) + switch { + case tmliteErr.IsErrCommitNotFound(err): + return tmtypes.SignedHeader{}, ErrVerifyCommit(height) + case err != nil: + return tmtypes.SignedHeader{}, err } - return nil + return check, nil } -// query performs a query from a Tendermint node with the provided store name -// and path. -func (ctx CLIContext) query(path string, key common.HexBytes) (res []byte, err error) { - node, err := ctx.GetNode() - if err != nil { - return res, err +// verifyProof perform response proof verification. +func (ctx CLIContext) verifyProof(queryPath string, resp abci.ResponseQuery) error { + if ctx.Verifier == nil { + return fmt.Errorf("missing valid certifier to verify data from distrusted node") } - opts := rpcclient.ABCIQueryOptions{ - Height: ctx.Height, - Trusted: ctx.TrustNode, + // the AppHash for height H is in header H+1 + commit, err := ctx.Verify(resp.Height + 1) + if err != nil { + return err } - result, err := node.ABCIQueryWithOptions(path, key, opts) + // TODO: Instead of reconstructing, stash on CLIContext field? + prt := store.DefaultProofRuntime() + + // TODO: Better convention for path? 
+ storeName, err := parseQueryStorePath(queryPath) if err != nil { - return res, err + return err } - resp := result.Response - if !resp.IsOK() { - return res, errors.Errorf("query failed: (%d) %s", resp.Code, resp.Log) + kp := merkle.KeyPath{} + kp = kp.AppendKey([]byte(storeName), merkle.KeyEncodingURL) + kp = kp.AppendKey(resp.Key, merkle.KeyEncodingURL) + + err = prt.VerifyValue(resp.Proof, commit.Header.AppHash, kp.String(), resp.Value) + if err != nil { + return errors.Wrap(err, "failed to prove merkle proof") } - return resp.Value, nil + return nil } // queryStore performs a query from a Tendermint node with the provided a store @@ -309,3 +234,41 @@ func (ctx CLIContext) queryStore(key cmn.HexBytes, storeName, endPath string) ([ path := fmt.Sprintf("/store/%s/%s", storeName, endPath) return ctx.query(path, key) } + +// isQueryStoreWithProof expects a format like /// +// queryType can be app or store. +func isQueryStoreWithProof(path string) bool { + if !strings.HasPrefix(path, "/") { + return false + } + + paths := strings.SplitN(path[1:], "/", 3) + if len(paths) != 3 { + return false + } + + if store.RequireProof("/" + paths[2]) { + return true + } + + return false +} + +// parseQueryStorePath expects a format like /store//key. +func parseQueryStorePath(path string) (storeName string, err error) { + if !strings.HasPrefix(path, "/") { + return "", errors.New("expected path to start with /") + } + + paths := strings.SplitN(path[1:], "/", 3) + switch { + case len(paths) != 3: + return "", errors.New("expected format like /store//key") + case paths[0] != "store": + return "", errors.New("expected format like /store//key") + case paths[2] != "key": + return "", errors.New("expected format like /store//key") + } + + return paths[1], nil +} diff --git a/client/flags.go b/client/flags.go index 8616f9e78..d9dfc3f4f 100644 --- a/client/flags.go +++ b/client/flags.go @@ -1,39 +1,50 @@ package client -import "github.com/spf13/cobra" +import ( + "github.com/spf13/cobra" + "github.com/spf13/viper" +) // nolint const ( - FlagUseLedger = "ledger" - FlagChainID = "chain-id" - FlagNode = "node" - FlagHeight = "height" - FlagGas = "gas" - FlagTrustNode = "trust-node" - FlagFrom = "from" - FlagName = "name" - FlagAccountNumber = "account-number" - FlagSequence = "sequence" - FlagMemo = "memo" - FlagFee = "fee" - FlagAsync = "async" - FlagJson = "json" - FlagPrintResponse = "print-response" + FlagUseLedger = "ledger" + FlagChainID = "chain-id" + FlagNode = "node" + FlagHeight = "height" + FlagTrustNode = "trust-node" + FlagFrom = "from" + FlagName = "name" + FlagAccountNumber = "account-number" + FlagSequence = "sequence" + FlagMemo = "memo" + FlagSource = "source" + FlagAsync = "async" + FlagJson = "json" + FlagPrintResponse = "print-response" + FlagDryRun = "dry-run" + FlagGenerateOnly = "generate-only" + FlagIndentResponse = "indent" ) // LineBreak can be included in a command list to provide a blank line // to help with readability -var LineBreak = &cobra.Command{Run: func(*cobra.Command, []string) {}} +var ( + LineBreak = &cobra.Command{Run: func(*cobra.Command, []string) {}} +) // GetCommands adds common flags to query commands func GetCommands(cmds ...*cobra.Command) []*cobra.Command { for _, c := range cmds { - // TODO: make this default false when we support proofs - c.Flags().Bool(FlagTrustNode, true, "Don't verify proofs for responses") + c.Flags().Bool(FlagIndentResponse, false, "Add indent to JSON response") + c.Flags().Bool(FlagTrustNode, false, "Trust connected full node (don't verify 
proofs for responses)") c.Flags().Bool(FlagUseLedger, false, "Use a connected Ledger device") c.Flags().String(FlagChainID, "", "Chain ID of tendermint node") c.Flags().String(FlagNode, "tcp://localhost:26657", ": to tendermint rpc interface for this chain") c.Flags().Int64(FlagHeight, 0, "block height to query, omit to get most recent provable block") + viper.BindPFlag(FlagTrustNode, c.Flags().Lookup(FlagTrustNode)) + viper.BindPFlag(FlagUseLedger, c.Flags().Lookup(FlagUseLedger)) + viper.BindPFlag(FlagChainID, c.Flags().Lookup(FlagChainID)) + viper.BindPFlag(FlagNode, c.Flags().Lookup(FlagNode)) } return cmds } @@ -41,18 +52,25 @@ func GetCommands(cmds ...*cobra.Command) []*cobra.Command { // PostCommands adds common flags for commands to post tx func PostCommands(cmds ...*cobra.Command) []*cobra.Command { for _, c := range cmds { - c.Flags().String(FlagFrom, "", "Name of private key with which to sign") + c.Flags().Bool(FlagIndentResponse, false, "Add indent to JSON response") + c.Flags().String(FlagFrom, "", "Name or address of private key with which to sign") c.Flags().Int64(FlagAccountNumber, 0, "AccountNumber number to sign the tx") c.Flags().Int64(FlagSequence, 0, "Sequence number to sign the tx") c.Flags().String(FlagMemo, "", "Memo to send along with transaction") - c.Flags().String(FlagFee, "", "Fee to pay along with transaction") + c.Flags().Int64(FlagSource, 0, "Source of tx") c.Flags().String(FlagChainID, "", "Chain ID of tendermint node") c.Flags().String(FlagNode, "tcp://localhost:26657", ": to tendermint rpc interface for this chain") c.Flags().Bool(FlagUseLedger, false, "Use a connected Ledger device") - c.Flags().Int64(FlagGas, 200000, "gas limit to set per-transaction") c.Flags().Bool(FlagAsync, false, "broadcast transactions asynchronously") c.Flags().Bool(FlagJson, false, "return output in json format") - c.Flags().Bool(FlagPrintResponse, false, "return tx response (only works with async = false)") + c.Flags().Bool(FlagPrintResponse, true, "return tx response (only works with async = false)") + c.Flags().Bool(FlagTrustNode, true, "Trust connected full node (don't verify proofs for responses)") + c.Flags().Bool(FlagDryRun, false, "ignore the perform a simulation of a transaction, but don't broadcast it") + c.Flags().Bool(FlagGenerateOnly, false, "build an unsigned transaction and write it to STDOUT") + viper.BindPFlag(FlagTrustNode, c.Flags().Lookup(FlagTrustNode)) + viper.BindPFlag(FlagUseLedger, c.Flags().Lookup(FlagUseLedger)) + viper.BindPFlag(FlagChainID, c.Flags().Lookup(FlagChainID)) + viper.BindPFlag(FlagNode, c.Flags().Lookup(FlagNode)) } return cmds } diff --git a/client/input.go b/client/input.go index e7d13f3bf..46c838e2e 100644 --- a/client/input.go +++ b/client/input.go @@ -7,7 +7,7 @@ import ( "strings" "github.com/bgentry/speakeasy" - isatty "github.com/mattn/go-isatty" + "github.com/mattn/go-isatty" "github.com/pkg/errors" ) @@ -24,7 +24,7 @@ func BufferStdin() *bufio.Reader { // It enforces the password length func GetPassword(prompt string, buf *bufio.Reader) (pass string, err error) { if inputIsTty() { - pass, err = speakeasy.Ask(prompt) + pass, err = speakeasy.FAsk(os.Stderr, prompt) } else { pass, err = readLineFromBuf(buf) } @@ -44,13 +44,8 @@ func GetPassword(prompt string, buf *bufio.Reader) (pass string, err error) { // GetSeed will request a seed phrase from stdin and trims off // leading/trailing spaces -func GetSeed(prompt string, buf *bufio.Reader) (seed string, err error) { - if inputIsTty() { - fmt.Println(prompt) - } - seed, err = 
readLineFromBuf(buf) - seed = strings.TrimSpace(seed) - return +func GetSeed(prompt string, buf *bufio.Reader) (string, error) { + return GetString(prompt, buf) } // GetCheckPassword will prompt for a password twice to verify they @@ -100,6 +95,19 @@ func GetConfirmation(prompt string, buf *bufio.Reader) (bool, error) { } } +// GetString simply returns the trimmed string output of a given reader. +func GetString(prompt string, buf *bufio.Reader) (string, error) { + if inputIsTty() && prompt != "" { + PrintPrefixed(prompt) + } + + out, err := readLineFromBuf(buf) + if err != nil { + return "", err + } + return strings.TrimSpace(out), nil +} + // inputIsTty returns true iff we have an interactive prompt, // where we can disable echo and request to repeat the password. // If false, we can optimize for piped input from another command @@ -117,3 +125,9 @@ func readLineFromBuf(buf *bufio.Reader) (string, error) { } return strings.TrimSpace(pass), nil } + +// PrintPrefixed prints a string with > prefixed for use in prompts. +func PrintPrefixed(msg string) { + msg = fmt.Sprintf("> %s\n", msg) + fmt.Fprint(os.Stderr, msg) +} diff --git a/client/keys/add.go b/client/keys/add.go index d462db1c0..6b6e38117 100644 --- a/client/keys/add.go +++ b/client/keys/add.go @@ -8,7 +8,6 @@ import ( "github.com/cosmos/cosmos-sdk/client" "github.com/gorilla/mux" - "github.com/pkg/errors" "github.com/spf13/cobra" "github.com/spf13/viper" @@ -46,7 +45,6 @@ phrase, otherwise, a new key will be generated.`, return cmd } -// nolint: gocyclo // TODO remove the above when addressing #1446 func runAddCmd(cmd *cobra.Command, args []string) error { var kb keys.Keybase @@ -62,10 +60,10 @@ func runAddCmd(cmd *cobra.Command, args []string) error { name = "inmemorykey" } else { if len(args) != 1 || len(args[0]) == 0 { - return errors.New("you must provide a name for the key") + return errMissingName() } name = args[0] - kb, err = GetKeyBase() + kb, err = GetKeyBaseWithWritePerm() if err != nil { return err } @@ -128,7 +126,8 @@ func printCreate(info keys.Info, seed string) { output := viper.Get(cli.OutputFlag) switch output { case "text": - printInfo(info) + printKeyInfo(info, Bech32KeyOutput) + // print seed unless requested not to. if !viper.GetBool(client.FlagUseLedger) && !viper.GetBool(flagNoBackup) { fmt.Println("**Important** write this seed phrase in a safe place.") @@ -144,11 +143,16 @@ func printCreate(info keys.Info, seed string) { if !viper.GetBool(flagNoBackup) { out.Seed = seed } - json, err := MarshalJSON(out) + var jsonString []byte + if viper.GetBool(client.FlagIndentResponse) { + jsonString, err = cdc.MarshalJSONIndent(out, "", " ") + } else { + jsonString, err = cdc.MarshalJSON(out) + } if err != nil { panic(err) // really shouldn't happen... 
} - fmt.Println(string(json)) + fmt.Println(string(jsonString)) default: panic(fmt.Sprintf("I can't speak: %s", output)) } @@ -165,75 +169,77 @@ type NewKeyBody struct { } // add new key REST handler -func AddNewKeyRequestHandler(w http.ResponseWriter, r *http.Request) { - var kb keys.Keybase - var m NewKeyBody +func AddNewKeyRequestHandler(indent bool) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + var kb keys.Keybase + var m NewKeyBody - kb, err := GetKeyBase() - if err != nil { - w.WriteHeader(500) - w.Write([]byte(err.Error())) - return - } + kb, err := GetKeyBaseWithWritePerm() + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } - body, err := ioutil.ReadAll(r.Body) - err = json.Unmarshal(body, &m) + body, err := ioutil.ReadAll(r.Body) + if err != nil { + w.WriteHeader(http.StatusBadRequest) + w.Write([]byte(err.Error())) + return + } + err = json.Unmarshal(body, &m) + if err != nil { + w.WriteHeader(http.StatusBadRequest) + w.Write([]byte(err.Error())) + return + } + if m.Name == "" { + w.WriteHeader(http.StatusBadRequest) + err = errMissingName() + w.Write([]byte(err.Error())) + return + } + if m.Password == "" { + w.WriteHeader(http.StatusBadRequest) + err = errMissingPassword() + w.Write([]byte(err.Error())) + return + } - if err != nil { - w.WriteHeader(http.StatusBadRequest) - w.Write([]byte(err.Error())) - return - } - if m.Name == "" { - w.WriteHeader(http.StatusBadRequest) - w.Write([]byte("You have to specify a name for the locally stored account.")) - return - } - if m.Password == "" { - w.WriteHeader(http.StatusBadRequest) - w.Write([]byte("You have to specify a password for the locally stored account.")) - return - } + // check if already exists + infos, err := kb.List() + for _, info := range infos { + if info.GetName() == m.Name { + w.WriteHeader(http.StatusConflict) + err = errKeyNameConflict(m.Name) + w.Write([]byte(err.Error())) + return + } + } - // check if already exists - infos, err := kb.List() - for _, i := range infos { - if i.GetName() == m.Name { - w.WriteHeader(http.StatusConflict) - w.Write([]byte(fmt.Sprintf("Account with name %s already exists.", m.Name))) + // create account + seed := m.Seed + if seed == "" { + seed = getSeed(keys.Secp256k1) + } + info, err := kb.CreateKey(m.Name, seed, m.Password) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) return } - } - // create account - seed := m.Seed - if seed == "" { - seed = getSeed(keys.Secp256k1) - } - info, err := kb.CreateKey(m.Name, seed, m.Password) - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - w.Write([]byte(err.Error())) - return - } - - keyOutput, err := Bech32KeyOutput(info) - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - w.Write([]byte(err.Error())) - return - } + keyOutput, err := Bech32KeyOutput(info) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } - keyOutput.Seed = seed + keyOutput.Seed = seed - bz, err := json.Marshal(keyOutput) - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - w.Write([]byte(err.Error())) - return + PostProcessResponse(w, cdc, keyOutput, indent) } - - w.Write(bz) } // function to just a new seed to display in the UI before actually persisting it in the keybase @@ -256,5 +262,86 @@ func SeedRequestHandler(w http.ResponseWriter, r *http.Request) { algo := keys.SigningAlgo(algoType) seed := getSeed(algo) + + 
w.Header().Set("Content-Type", "application/json") w.Write([]byte(seed)) } + +// RecoverKeyBody is recover key request REST body +type RecoverKeyBody struct { + Password string `json:"password"` + Seed string `json:"seed"` +} + +// RecoverRequestHandler performs key recover request +func RecoverRequestHandler(indent bool) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + vars := mux.Vars(r) + name := vars["name"] + var m RecoverKeyBody + body, err := ioutil.ReadAll(r.Body) + if err != nil { + w.WriteHeader(http.StatusBadRequest) + w.Write([]byte(err.Error())) + return + } + err = cdc.UnmarshalJSON(body, &m) + if err != nil { + w.WriteHeader(http.StatusBadRequest) + w.Write([]byte(err.Error())) + return + } + + if name == "" { + w.WriteHeader(http.StatusBadRequest) + err = errMissingName() + w.Write([]byte(err.Error())) + return + } + if m.Password == "" { + w.WriteHeader(http.StatusBadRequest) + err = errMissingPassword() + w.Write([]byte(err.Error())) + return + } + if m.Seed == "" { + w.WriteHeader(http.StatusBadRequest) + err = errMissingSeed() + w.Write([]byte(err.Error())) + return + } + + kb, err := GetKeyBaseWithWritePerm() + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + // check if already exists + infos, err := kb.List() + for _, info := range infos { + if info.GetName() == name { + w.WriteHeader(http.StatusConflict) + err = errKeyNameConflict(name) + w.Write([]byte(err.Error())) + return + } + } + + info, err := kb.CreateKey(name, m.Seed, m.Password) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + + keyOutput, err := Bech32KeyOutput(info) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + + PostProcessResponse(w, cdc, keyOutput, indent) + } +} diff --git a/client/keys/wire.go b/client/keys/codec.go similarity index 70% rename from client/keys/wire.go rename to client/keys/codec.go index a163f995a..6bbb16850 100644 --- a/client/keys/wire.go +++ b/client/keys/codec.go @@ -1,14 +1,14 @@ package keys import ( - "github.com/cosmos/cosmos-sdk/wire" + "github.com/cosmos/cosmos-sdk/codec" ) -var cdc *wire.Codec +var cdc *codec.Codec func init() { - cdc = wire.NewCodec() - wire.RegisterCrypto(cdc) + cdc = codec.New() + codec.RegisterCrypto(cdc) } // marshal keys diff --git a/client/keys/delete.go b/client/keys/delete.go index 944feb4b1..406843663 100644 --- a/client/keys/delete.go +++ b/client/keys/delete.go @@ -7,6 +7,7 @@ import ( "github.com/cosmos/cosmos-sdk/client" keys "github.com/cosmos/cosmos-sdk/crypto/keys" + keyerror "github.com/cosmos/cosmos-sdk/crypto/keys/keyerror" "github.com/gorilla/mux" "github.com/spf13/cobra" @@ -25,7 +26,7 @@ func deleteKeyCommand() *cobra.Command { func runDeleteCmd(cmd *cobra.Command, args []string) error { name := args[0] - kb, err := GetKeyBase() + kb, err := GetKeyBaseWithWritePerm() if err != nil { return err } @@ -68,25 +69,32 @@ func DeleteKeyRequestHandler(w http.ResponseWriter, r *http.Request) { decoder := json.NewDecoder(r.Body) err := decoder.Decode(&m) if err != nil { - w.WriteHeader(400) + w.WriteHeader(http.StatusBadRequest) w.Write([]byte(err.Error())) return } - kb, err = GetKeyBase() + kb, err = GetKeyBaseWithWritePerm() if err != nil { - w.WriteHeader(500) + w.WriteHeader(http.StatusInternalServerError) w.Write([]byte(err.Error())) return } - // TODO handle error if key is not available or pass is wrong err = 
kb.Delete(name, m.Password) - if err != nil { - w.WriteHeader(500) + if keyerror.IsErrKeyNotFound(err) { + w.WriteHeader(http.StatusNotFound) + w.Write([]byte(err.Error())) + return + } else if keyerror.IsErrWrongPassword(err) { + w.WriteHeader(http.StatusUnauthorized) + w.Write([]byte(err.Error())) + return + } else if err != nil { + w.WriteHeader(http.StatusInternalServerError) w.Write([]byte(err.Error())) return } - w.WriteHeader(200) + w.WriteHeader(http.StatusOK) } diff --git a/client/keys/errors.go b/client/keys/errors.go new file mode 100644 index 000000000..9c6139d7a --- /dev/null +++ b/client/keys/errors.go @@ -0,0 +1,19 @@ +package keys + +import "fmt" + +func errKeyNameConflict(name string) error { + return fmt.Errorf("account with name %s already exists", name) +} + +func errMissingName() error { + return fmt.Errorf("you have to specify a name for the locally stored account") +} + +func errMissingPassword() error { + return fmt.Errorf("you have to specify a password for the locally stored account") +} + +func errMissingSeed() error { + return fmt.Errorf("you have to specify a seed to recover the key") +} diff --git a/client/keys/list.go b/client/keys/list.go index 22f163f1d..f232fccff 100644 --- a/client/keys/list.go +++ b/client/keys/list.go @@ -1,7 +1,6 @@ package keys import ( - "encoding/json" "net/http" "github.com/spf13/cobra" @@ -35,35 +34,31 @@ func runListCmd(cmd *cobra.Command, args []string) error { // REST // query key list REST handler -func QueryKeysRequestHandler(w http.ResponseWriter, r *http.Request) { - kb, err := GetKeyBase() - if err != nil { - w.WriteHeader(500) - w.Write([]byte(err.Error())) - return - } - infos, err := kb.List() - if err != nil { - w.WriteHeader(500) - w.Write([]byte(err.Error())) - return - } - // an empty list will be JSONized as null, but we want to keep the empty list - if len(infos) == 0 { - w.Write([]byte("[]")) - return - } - keysOutput, err := Bech32KeysOutput(infos) - if err != nil { - w.WriteHeader(500) - w.Write([]byte(err.Error())) - return - } - output, err := json.MarshalIndent(keysOutput, "", " ") - if err != nil { - w.WriteHeader(500) - w.Write([]byte(err.Error())) - return +func QueryKeysRequestHandler(indent bool) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + kb, err := GetKeyBase() + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + infos, err := kb.List() + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + // an empty list will be JSONized as null, but we want to keep the empty list + if len(infos) == 0 { + PostProcessResponse(w, cdc, "[]", indent) + return + } + keysOutput, err := Bech32KeysOutput(infos) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + PostProcessResponse(w, cdc, keysOutput, indent) } - w.Write(output) } diff --git a/client/keys/mnemonic.go b/client/keys/mnemonic.go new file mode 100644 index 000000000..33270a087 --- /dev/null +++ b/client/keys/mnemonic.go @@ -0,0 +1,78 @@ +package keys + +import ( + "crypto/sha256" + "fmt" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/spf13/cobra" + + bip39 "github.com/bartekn/go-bip39" +) + +const ( + flagUserEntropy = "unsafe-entropy" + + mnemonicEntropySize = 256 +) + +func mnemonicKeyCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "mnemonic", + Short: "Compute the bip39 mnemonic for some input entropy", + Long: "Create a bip39 mnemonic, 
sometimes called a seed phrase, by reading from the system entropy. To pass your own entropy, use --unsafe-entropy", + RunE: runMnemonicCmd, + } + cmd.Flags().Bool(flagUserEntropy, false, "Prompt the user to supply their own entropy, instead of relying on the system") + return cmd +} + +func runMnemonicCmd(cmd *cobra.Command, args []string) error { + flags := cmd.Flags() + + userEntropy, _ := flags.GetBool(flagUserEntropy) + + var entropySeed []byte + + if userEntropy { + // prompt the user to enter some entropy + buf := client.BufferStdin() + inputEntropy, err := client.GetString("> WARNING: Generate at least 256-bits of entropy and enter the results here:", buf) + if err != nil { + return err + } + if len(inputEntropy) < 43 { + return fmt.Errorf("256-bits is 43 characters in Base-64, and 100 in Base-6. You entered %v, and probably want more", len(inputEntropy)) + } + conf, err := client.GetConfirmation( + fmt.Sprintf("> Input length: %d", len(inputEntropy)), + buf) + if err != nil { + return err + } + if !conf { + return nil + } + + // hash input entropy to get entropy seed + hashedEntropy := sha256.Sum256([]byte(inputEntropy)) + entropySeed = hashedEntropy[:] + printStep() + } else { + // read entropy seed straight from crypto.Rand + var err error + entropySeed, err = bip39.NewEntropy(mnemonicEntropySize) + if err != nil { + return err + } + } + + mnemonic, err := bip39.NewMnemonic(entropySeed[:]) + if err != nil { + return err + } + + fmt.Println(mnemonic) + + return nil +} diff --git a/client/keys/new.go b/client/keys/new.go new file mode 100644 index 000000000..6d7422688 --- /dev/null +++ b/client/keys/new.go @@ -0,0 +1,188 @@ +package keys + +import ( + "fmt" + + "github.com/bartekn/go-bip39" + "github.com/pkg/errors" + "github.com/spf13/cobra" + "github.com/spf13/viper" + + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/crypto/keys" + "github.com/cosmos/cosmos-sdk/crypto/keys/hd" +) + +const ( + flagNewDefault = "default" + flagBIP44Path = "bip44-path" +) + +func newKeyCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "new", + Short: "Interactive command to derive a new private key, encrypt it, and save to disk", + Long: `Derive a new private key using an interactive command that will prompt you for each input. +Optionally specify a bip39 mnemonic, a bip39 passphrase to further secure the mnemonic, +and a bip32 HD path to derive a specific account. The key will be stored under the given name +and encrypted with the given password. 
The only input that is required is the encryption password.`, + Args: cobra.ExactArgs(1), + RunE: runNewCmd, + } + cmd.Flags().Bool(flagNewDefault, false, "Skip the prompts and just use the default values for everything") + cmd.Flags().Bool(client.FlagUseLedger, false, "Store a local reference to a private key on a Ledger device") + cmd.Flags().String(flagBIP44Path, "44'/714'/0'/0/0", "BIP44 path from which to derive a private key") + return cmd +} + +/* +input + - bip39 mnemonic + - bip39 passphrase + - bip44 path + - local encryption password +output + - armor encrypted private key (saved to file) +*/ +func runNewCmd(cmd *cobra.Command, args []string) error { + name := args[0] + kb, err := GetKeyBaseWithWritePerm() + if err != nil { + return err + } + + buf := client.BufferStdin() + + _, err = kb.Get(name) + if err == nil { + // account exists, ask for user confirmation + if response, err := client.GetConfirmation( + fmt.Sprintf("> override the existing name %s", name), buf); err != nil || !response { + return err + } + } + + flags := cmd.Flags() + useDefaults, _ := flags.GetBool(flagNewDefault) + bipFlag := flags.Lookup(flagBIP44Path) + + bip44Params, err := getBIP44ParamsAndPath(bipFlag.Value.String(), bipFlag.Changed || useDefaults) + if err != nil { + return err + } + + // If we're using ledger, only thing we need is the path. So generate key and + // we're done. + if viper.GetBool(client.FlagUseLedger) { + algo := keys.Secp256k1 + path := bip44Params.DerivationPath() // ccrypto.DerivationPath{44, 118, account, 0, index} + + info, err := kb.CreateLedger(name, path, algo) + if err != nil { + return err + } + + printCreate(info, "") + return nil + } + + var mnemonic string + + if !useDefaults { + mnemonic, err = client.GetString("Enter your bip39 mnemonic, or hit enter to generate one.", buf) + if err != nil { + return err + } + } + + if len(mnemonic) == 0 { + // read entropy seed straight from crypto.Rand and convert to mnemonic + entropySeed, err := bip39.NewEntropy(mnemonicEntropySize) + if err != nil { + return err + } + + mnemonic, err = bip39.NewMnemonic(entropySeed[:]) + if err != nil { + return err + } + } + + // get bip39 passphrase + var bip39Passphrase string + if !useDefaults { + printStep() + printPrefixed("Enter your bip39 passphrase. This is combined with the mnemonic to derive the seed") + + bip39Passphrase, err = client.GetString("Most users should just hit enter to use the default, \"\"", buf) + if err != nil { + return err + } + + // if they use one, make them re-enter it + if len(bip39Passphrase) != 0 { + p2, err := client.GetString("Repeat the passphrase:", buf) + if err != nil { + return err + } + + if bip39Passphrase != p2 { + return errors.New("passphrases don't match") + } + } + } + + printStep() + + // get the encryption password + encryptPassword, err := client.GetCheckPassword( + "> Enter a passphrase to encrypt your key to disk:", + "> Repeat the passphrase:", buf) + if err != nil { + return err + } + + info, err := kb.Derive(name, mnemonic, bip39Passphrase, encryptPassword, *bip44Params) + if err != nil { + return err + } + + _ = info + return nil +} + +func getBIP44ParamsAndPath(path string, flagSet bool) (*hd.BIP44Params, error) { + buf := client.BufferStdin() + bip44Path := path + + // if it wasn't set in the flag, give it a chance to overide interactively + if !flagSet { + var err error + + printStep() + + bip44Path, err = client.GetString(fmt.Sprintf("Enter your bip44 path. 
Default is %s\n", path), buf) + if err != nil { + return nil, err + } + + if len(bip44Path) == 0 { + bip44Path = path + } + } + + bip44params, err := hd.NewParamsFromPath(bip44Path) + if err != nil { + return nil, err + } + + return bip44params, nil +} + +func printPrefixed(msg string) { + fmt.Printf("> %s\n", msg) +} + +func printStep() { + printPrefixed("-------------------------------------") +} diff --git a/client/keys/root.go b/client/keys/root.go index c8f6aea69..b10cd2b55 100644 --- a/client/keys/root.go +++ b/client/keys/root.go @@ -19,9 +19,11 @@ func Commands() *cobra.Command { needs to sign with a private key.`, } cmd.AddCommand( + mnemonicKeyCommand(), + newKeyCommand(), addKeyCommand(), listKeysCmd, - showKeysCmd, + showKeysCmd(), client.LineBreak, deleteKeyCommand(), updateKeyCommand(), @@ -30,11 +32,12 @@ func Commands() *cobra.Command { } // register REST routes -func RegisterRoutes(r *mux.Router) { - r.HandleFunc("/keys", QueryKeysRequestHandler).Methods("GET") - r.HandleFunc("/keys", AddNewKeyRequestHandler).Methods("POST") +func RegisterRoutes(r *mux.Router, indent bool) { + r.HandleFunc("/keys", QueryKeysRequestHandler(indent)).Methods("GET") + r.HandleFunc("/keys", AddNewKeyRequestHandler(indent)).Methods("POST") r.HandleFunc("/keys/seed", SeedRequestHandler).Methods("GET") - r.HandleFunc("/keys/{name}", GetKeyRequestHandler).Methods("GET") + r.HandleFunc("/keys/{name}/recover", RecoverRequestHandler(indent)).Methods("POST") + r.HandleFunc("/keys/{name}", GetKeyRequestHandler(indent)).Methods("GET") r.HandleFunc("/keys/{name}", UpdateKeyRequestHandler).Methods("PUT") r.HandleFunc("/keys/{name}", DeleteKeyRequestHandler).Methods("DELETE") } diff --git a/client/keys/show.go b/client/keys/show.go index 873c45a4b..3b574544b 100644 --- a/client/keys/show.go +++ b/client/keys/show.go @@ -1,67 +1,184 @@ package keys import ( - "encoding/json" + "fmt" + "github.com/cosmos/cosmos-sdk/crypto/keys" + "github.com/tendermint/tendermint/crypto" "net/http" - keys "github.com/cosmos/cosmos-sdk/crypto/keys" + "github.com/cosmos/cosmos-sdk/crypto/keys/keyerror" + sdk "github.com/cosmos/cosmos-sdk/types" "github.com/gorilla/mux" - + "github.com/pkg/errors" "github.com/spf13/cobra" + "github.com/spf13/viper" + "github.com/tendermint/tendermint/crypto/multisig" + "github.com/tendermint/tendermint/libs/cli" +) + +const ( + // FlagAddress is the flag for the user's address on the command line. + FlagAddress = "address" + // FlagPublicKey represents the user's public key on the command line. + FlagPublicKey = "pubkey" + // FlagBechPrefix defines a desired Bech32 prefix encoding for a key. 
+ FlagBechPrefix = "bech" + + flagMultiSigThreshold = "multisig-threshold" + defaultMultiSigKeyName = "multi" ) -var showKeysCmd = &cobra.Command{ - Use: "show ", - Short: "Show key info for the given name", - Long: `Return public details of one local key.`, - Args: cobra.ExactArgs(1), - RunE: func(cmd *cobra.Command, args []string) error { - name := args[0] - info, err := getKey(name) - if err == nil { - printInfo(info) +var _ keys.Info = (*multiSigKey)(nil) + +type multiSigKey struct { + name string + key crypto.PubKey +} + +func (m multiSigKey) GetName() string { return m.name } +func (m multiSigKey) GetType() keys.KeyType { return keys.TypeLocal } +func (m multiSigKey) GetPubKey() crypto.PubKey { return m.key } +func (m multiSigKey) GetAddress() sdk.AccAddress { return sdk.AccAddress(m.key.Address()) } + +func showKeysCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "show [name]", + Short: "Show key info for the given name", + Long: `Return public details of one local key.`, + Args: cobra.MinimumNArgs(1), + RunE: runShowCmd, + } + + cmd.Flags().String(FlagBechPrefix, "acc", "The Bech32 prefix encoding for a key (acc|val|cons)") + cmd.Flags().Bool(FlagAddress, false, "output the address only (overrides --output)") + cmd.Flags().Bool(FlagPublicKey, false, "output the public key only (overrides --output)") + cmd.Flags().Uint(flagMultiSigThreshold, 1, "K out of N required signatures") + + return cmd +} + +func runShowCmd(cmd *cobra.Command, args []string) (err error) { + var info keys.Info + + if len(args) == 1 { + info, err = GetKeyInfo(args[0]) + if err != nil { + return err + } + } else { + pks := make([]crypto.PubKey, len(args)) + for i, keyName := range args { + info, err := GetKeyInfo(keyName) + if err != nil { + return err + } + pks[i] = info.GetPubKey() + } + + multisigThreshold := viper.GetInt(flagMultiSigThreshold) + err = validateMultisigThreshold(multisigThreshold, len(args)) + if err != nil { + return err + } + multikey := multisig.NewPubKeyMultisigThreshold(multisigThreshold, pks) + info = multiSigKey{ + name: defaultMultiSigKeyName, + key: multikey, } + } + + isShowAddr := viper.GetBool(FlagAddress) + isShowPubKey := viper.GetBool(FlagPublicKey) + isOutputSet := cmd.Flag(cli.OutputFlag).Changed + + if isShowAddr && isShowPubKey { + return errors.New("cannot use both --address and --pubkey at once") + } + + if isOutputSet && (isShowAddr || isShowPubKey) { + return errors.New("cannot use --output with --address or --pubkey") + } + + bechKeyOut, err := getBechKeyOut(viper.GetString(FlagBechPrefix)) + if err != nil { return err - }, + } + + switch { + case isShowAddr: + printKeyAddress(info, bechKeyOut) + case isShowPubKey: + printPubKey(info, bechKeyOut) + default: + printKeyInfo(info, bechKeyOut) + } + + return nil } -func getKey(name string) (keys.Info, error) { - kb, err := GetKeyBase() - if err != nil { - return nil, err +func validateMultisigThreshold(k, nKeys int) error { + if k <= 0 { + return fmt.Errorf("threshold must be a positive integer") + } + if nKeys < k { + return fmt.Errorf( + "threshold k of n multisignature: %d < %d", nKeys, k) + } + return nil +} + +func getBechKeyOut(bechPrefix string) (bechKeyOutFn, error) { + switch bechPrefix { + case "acc": + return Bech32KeyOutput, nil + case "val": + return Bech32ValKeyOutput, nil + case "cons": + return Bech32ConsKeyOutput, nil } - return kb.Get(name) + return nil, fmt.Errorf("invalid Bech32 prefix encoding provided: %s", bechPrefix) } /////////////////////////// // REST // get key REST handler -func 
GetKeyRequestHandler(w http.ResponseWriter, r *http.Request) { - vars := mux.Vars(r) - name := vars["name"] +func GetKeyRequestHandler(indent bool) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + vars := mux.Vars(r) + name := vars["name"] + bechPrefix := r.URL.Query().Get(FlagBechPrefix) - info, err := getKey(name) - // TODO check for the error if key actually does not exist, instead of assuming this as the reason - if err != nil { - w.WriteHeader(404) - w.Write([]byte(err.Error())) - return - } + if bechPrefix == "" { + bechPrefix = "acc" + } - keyOutput, err := Bech32KeyOutput(info) - if err != nil { - w.WriteHeader(500) - w.Write([]byte(err.Error())) - return - } - output, err := json.MarshalIndent(keyOutput, "", " ") - if err != nil { - w.WriteHeader(500) - w.Write([]byte(err.Error())) - return - } + bechKeyOut, err := getBechKeyOut(bechPrefix) + if err != nil { + w.WriteHeader(http.StatusBadRequest) + w.Write([]byte(err.Error())) + return + } + + info, err := GetKeyInfo(name) + if keyerror.IsErrKeyNotFound(err) { + w.WriteHeader(http.StatusNotFound) + w.Write([]byte(err.Error())) + return + } else if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } - w.Write(output) + keyOutput, err := bechKeyOut(info) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + + PostProcessResponse(w, cdc, keyOutput, indent) + } } diff --git a/client/keys/update.go b/client/keys/update.go index 78a81bf0e..2489bce12 100644 --- a/client/keys/update.go +++ b/client/keys/update.go @@ -9,6 +9,7 @@ import ( keys "github.com/cosmos/cosmos-sdk/crypto/keys" "github.com/gorilla/mux" + "github.com/cosmos/cosmos-sdk/crypto/keys/keyerror" "github.com/spf13/cobra" ) @@ -26,7 +27,7 @@ func runUpdateCmd(cmd *cobra.Command, args []string) error { name := args[0] buf := client.BufferStdin() - kb, err := GetKeyBase() + kb, err := GetKeyBaseWithWritePerm() if err != nil { return err } @@ -69,27 +70,35 @@ func UpdateKeyRequestHandler(w http.ResponseWriter, r *http.Request) { decoder := json.NewDecoder(r.Body) err := decoder.Decode(&m) if err != nil { - w.WriteHeader(400) + w.WriteHeader(http.StatusBadRequest) w.Write([]byte(err.Error())) return } - kb, err = GetKeyBase() + kb, err = GetKeyBaseWithWritePerm() if err != nil { - w.WriteHeader(500) + w.WriteHeader(http.StatusInternalServerError) w.Write([]byte(err.Error())) return } getNewpass := func() (string, error) { return m.NewPassword, nil } - // TODO check if account exists and if password is correct err = kb.Update(name, m.OldPassword, getNewpass) - if err != nil { - w.WriteHeader(401) + if keyerror.IsErrKeyNotFound(err) { + w.WriteHeader(http.StatusNotFound) + w.Write([]byte(err.Error())) + return + } else if keyerror.IsErrWrongPassword(err) { + w.WriteHeader(http.StatusUnauthorized) + w.Write([]byte(err.Error())) + return + } else if err != nil { + w.WriteHeader(http.StatusInternalServerError) w.Write([]byte(err.Error())) return } - w.WriteHeader(200) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) } diff --git a/client/keys/utils.go b/client/keys/utils.go index 907f9eda8..8a7eefea3 100644 --- a/client/keys/utils.go +++ b/client/keys/utils.go @@ -2,17 +2,20 @@ package keys import ( "fmt" + "github.com/syndtr/goleveldb/leveldb/opt" "path/filepath" "github.com/spf13/viper" - keys "github.com/cosmos/cosmos-sdk/crypto/keys" + "github.com/cosmos/cosmos-sdk/crypto/keys" 
"github.com/tendermint/tendermint/libs/cli" dbm "github.com/tendermint/tendermint/libs/db" "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" sdk "github.com/cosmos/cosmos-sdk/types" + "net/http" ) // KeyDBName is the directory under root where we store the keys @@ -21,13 +24,7 @@ const KeyDBName = "keys" // keybase is used to make GetKeyBase a singleton var keybase keys.Keybase -// TODO make keybase take a database not load from the directory - -// initialize a keybase based on the configuration -func GetKeyBase() (keys.Keybase, error) { - rootDir := viper.GetString(cli.HomeFlag) - return GetKeyBaseFromDir(rootDir) -} +type bechKeyOutFn func(keyInfo keys.Info) (KeyOutput, error) // GetKeyInfo returns key info for a given name. An error is returned if the // keybase cannot be retrieved or getting the info fails. @@ -78,10 +75,33 @@ func ReadPassphraseFromStdin(name string) (string, error) { return passphrase, nil } -// initialize a keybase based on the configuration +// TODO make keybase take a database not load from the directory + +// GetKeyBase initializes a read-only KeyBase based on the configuration. +func GetKeyBase() (keys.Keybase, error) { + rootDir := viper.GetString(cli.HomeFlag) + return GetKeyBaseFromDir(rootDir) +} + +// GetKeyBaseWithWritePerm initialize a keybase based on the configuration with write permissions. +func GetKeyBaseWithWritePerm() (keys.Keybase, error) { + rootDir := viper.GetString(cli.HomeFlag) + return GetKeyBaseFromDirWithWritePerm(rootDir) +} + +// GetKeyBaseFromDirWithWritePerm initializes a keybase at a particular dir with write permissions. +func GetKeyBaseFromDirWithWritePerm(rootDir string) (keys.Keybase, error) { + return getKeyBaseFromDirWithOpts(rootDir, nil) +} + +// GetKeyBaseFromDir initializes a read-only keybase at a particular dir. func GetKeyBaseFromDir(rootDir string) (keys.Keybase, error) { + return getKeyBaseFromDirWithOpts(rootDir, &opt.Options{ReadOnly: true}) +} + +func getKeyBaseFromDirWithOpts(rootDir string, o *opt.Options) (keys.Keybase, error) { if keybase == nil { - db, err := dbm.NewGoLevelDB(KeyDBName, filepath.Join(rootDir, "keys")) + db, err := dbm.NewGoLevelDBWithOpts(KeyDBName, filepath.Join(rootDir, "keys"), o) if err != nil { return nil, err } @@ -97,11 +117,11 @@ func SetKeyBase(kb keys.Keybase) { // used for outputting keys.Info over REST type KeyOutput struct { - Name string `json:"name"` - Type string `json:"type"` - Address sdk.AccAddress `json:"address"` - PubKey string `json:"pub_key"` - Seed string `json:"seed,omitempty"` + Name string `json:"name"` + Type string `json:"type"` + Address string `json:"address"` + PubKey string `json:"pub_key"` + Seed string `json:"seed,omitempty"` } // create a list of KeyOutput in bech32 format @@ -119,24 +139,61 @@ func Bech32KeysOutput(infos []keys.Info) ([]KeyOutput, error) { // create a KeyOutput in bech32 format func Bech32KeyOutput(info keys.Info) (KeyOutput, error) { - account := sdk.AccAddress(info.GetPubKey().Address().Bytes()) + accAddr := sdk.AccAddress(info.GetPubKey().Address().Bytes()) bechPubKey, err := sdk.Bech32ifyAccPub(info.GetPubKey()) if err != nil { return KeyOutput{}, err } + return KeyOutput{ Name: info.GetName(), Type: info.GetType().String(), - Address: account, + Address: accAddr.String(), PubKey: bechPubKey, }, nil } -func printInfo(info keys.Info) { - ko, err := Bech32KeyOutput(info) +// Bech32ConsKeyOutput returns key output for a consensus node's key +// information. 
+func Bech32ConsKeyOutput(keyInfo keys.Info) (KeyOutput, error) { + consAddr := sdk.ConsAddress(keyInfo.GetPubKey().Address().Bytes()) + + bechPubKey, err := sdk.Bech32ifyConsPub(keyInfo.GetPubKey()) + if err != nil { + return KeyOutput{}, err + } + + return KeyOutput{ + Name: keyInfo.GetName(), + Type: keyInfo.GetType().String(), + Address: consAddr.String(), + PubKey: bechPubKey, + }, nil +} + +// Bech32ValKeyOutput returns key output for a validator's key information. +func Bech32ValKeyOutput(keyInfo keys.Info) (KeyOutput, error) { + valAddr := sdk.ValAddress(keyInfo.GetPubKey().Address().Bytes()) + + bechPubKey, err := sdk.Bech32ifyValPub(keyInfo.GetPubKey()) + if err != nil { + return KeyOutput{}, err + } + + return KeyOutput{ + Name: keyInfo.GetName(), + Type: keyInfo.GetType().String(), + Address: valAddr.String(), + PubKey: bechPubKey, + }, nil +} + +func printKeyInfo(keyInfo keys.Info, bechKeyOut bechKeyOutFn) { + ko, err := bechKeyOut(keyInfo) if err != nil { panic(err) } + switch viper.Get(cli.OutputFlag) { case "text": fmt.Printf("NAME:\tTYPE:\tADDRESS:\t\t\t\t\t\tPUBKEY:\n") @@ -146,6 +203,7 @@ func printKeyInfo(keyInfo keys.Info, bechKeyOut bechKeyOutFn) { if err != nil { panic(err) } + fmt.Println(string(out)) } } @@ -173,3 +231,44 @@ func printInfos(infos []keys.Info) { func printKeyOutput(ko KeyOutput) { fmt.Printf("%s\t%s\t%s\t%s\n", ko.Name, ko.Type, ko.Address, ko.PubKey) } + +func printKeyAddress(info keys.Info, bechKeyOut bechKeyOutFn) { + ko, err := bechKeyOut(info) + if err != nil { + panic(err) + } + + fmt.Println(ko.Address) +} + +func printPubKey(info keys.Info, bechKeyOut bechKeyOutFn) { + ko, err := bechKeyOut(info) + if err != nil { + panic(err) + } + + fmt.Println(ko.PubKey) +} + +// PostProcessResponse performs post-processing for a REST response +func PostProcessResponse(w http.ResponseWriter, cdc *codec.Codec, response interface{}, indent bool) { + var output []byte + switch response.(type) { + default: + var err error + if indent { + output, err = cdc.MarshalJSONIndent(response, "", " ") + } else { + output, err = cdc.MarshalJSON(response) + } + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + case []byte: + output = response.([]byte) + } + w.Header().Set("Content-Type", "application/json") + w.Write(output) +} diff --git a/client/keys/utils_test.go b/client/keys/utils_test.go new file mode 100644 index 000000000..6b65bb55a --- /dev/null +++ b/client/keys/utils_test.go @@ -0,0 +1,39 @@ +package keys + +import ( + "github.com/cosmos/cosmos-sdk/crypto/keys" + "github.com/stretchr/testify/require" + "io/ioutil" + "os" + "testing" +) + +func TestGetKeyBaseLocks(t *testing.T) { + dir, err := ioutil.TempDir("", "cosmos-sdk-keys") + require.Nil(t, err) + defer os.RemoveAll(dir) + + // Acquire db + kb, err := GetKeyBaseFromDirWithWritePerm(dir) + require.Nil(t, err) + _, _, err = kb.CreateMnemonic("foo", keys.English, "12345678", keys.Secp256k1) + require.Nil(t, err) + // Reset global variable + keybase = nil + // Try to acquire another keybase from the same storage + _, err = GetKeyBaseFromDirWithWritePerm(dir) + require.NotNil(t, err) + _, err = GetKeyBaseFromDirWithWritePerm(dir) + require.NotNil(t, err) + + // Close the db and try to acquire the lock + kb.CloseDB() + kb, err = GetKeyBaseFromDirWithWritePerm(dir) + require.Nil(t, err) + + // Try to acquire another read-only keybase from the same storage + _, err = GetKeyBaseFromDir(dir) + require.Nil(t, err) + + kb.CloseDB() +} diff --git a/client/lcd/certificates.go 
b/client/lcd/certificates.go new file mode 100644 index 000000000..1516ed35a --- /dev/null +++ b/client/lcd/certificates.go @@ -0,0 +1,174 @@ +package lcd + +import ( + "bytes" + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/sha256" + "crypto/x509" + "crypto/x509/pkix" + "encoding/pem" + "errors" + "fmt" + "io/ioutil" + "math/big" + "net" + "os" + "strings" + "time" +) + +// default: 30 days +const defaultValidFor = 30 * 24 * time.Hour + +func generateSelfSignedCert(host string) (certBytes []byte, priv *ecdsa.PrivateKey, err error) { + priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + notBefore := time.Now() + notAfter := notBefore.Add(defaultValidFor) + serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128) + serialNumber, err := rand.Int(rand.Reader, serialNumberLimit) + if err != nil { + err = fmt.Errorf("failed to generate serial number: %s", err) + return + } + + template := x509.Certificate{ + SerialNumber: serialNumber, + Subject: pkix.Name{ + Organization: []string{"Gaia Lite"}, + }, + NotBefore: notBefore, + NotAfter: notAfter, + KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, + ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth}, + BasicConstraintsValid: true, + IsCA: true, + } + hosts := strings.Split(host, ",") + for _, h := range hosts { + if ip := net.ParseIP(h); ip != nil { + template.IPAddresses = append(template.IPAddresses, ip) + } else { + template.DNSNames = append(template.DNSNames, h) + } + } + + certBytes, err = x509.CreateCertificate(rand.Reader, &template, &template, &priv.PublicKey, priv) + if err != nil { + err = fmt.Errorf("couldn't create certificate: %s", err) + return + } + return +} + +func writeCertAndPrivKey(certBytes []byte, priv *ecdsa.PrivateKey) (certFile string, keyFile string, err error) { + if priv == nil { + err = errors.New("private key is nil") + return + } + certFile, err = writeCertificateFile(certBytes) + if err != nil { + return + } + keyFile, err = writeKeyFile(priv) + return +} + +func writeCertificateFile(certBytes []byte) (filename string, err error) { + f, err := ioutil.TempFile("", "cert_") + if err != nil { + return + } + defer f.Close() + filename = f.Name() + if err := pem.Encode(f, &pem.Block{Type: "CERTIFICATE", Bytes: certBytes}); err != nil { + return filename, fmt.Errorf("failed to write data to %s: %s", filename, err) + } + return +} + +func writeKeyFile(priv *ecdsa.PrivateKey) (filename string, err error) { + f, err := ioutil.TempFile("", "key_") + if err != nil { + return + } + defer f.Close() + filename = f.Name() + block, err := pemBlockForKey(priv) + if err != nil { + return + } + if err := pem.Encode(f, block); err != nil { + return filename, fmt.Errorf("failed to write data to %s: %s", filename, err) + } + return +} + +func pemBlockForKey(priv *ecdsa.PrivateKey) (*pem.Block, error) { + b, err := x509.MarshalECPrivateKey(priv) + if err != nil { + return nil, fmt.Errorf("unable to marshal ECDSA private key: %v", err) + } + return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b}, nil + +} + +func genCertKeyFilesAndReturnFingerprint(sslHosts string) (certFile, keyFile string, fingerprint string, err error) { + certBytes, priv, err := generateSelfSignedCert(sslHosts) + if err != nil { + return + } + certFile, keyFile, err = writeCertAndPrivKey(certBytes, priv) + cleanupFunc := func() { + os.Remove(certFile) + os.Remove(keyFile) + } + // Either of the files could have been written already, + // thus clean up regardless of the error. 
+ if err != nil { + defer cleanupFunc() + return + } + fingerprint, err = fingerprintForCertificate(certBytes) + if err != nil { + defer cleanupFunc() + return + } + return +} + +func fingerprintForCertificate(certBytes []byte) (string, error) { + cert, err := x509.ParseCertificate(certBytes) + if err != nil { + return "", err + } + h := sha256.New() + h.Write(cert.Raw) + fingerprintBytes := h.Sum(nil) + var buf bytes.Buffer + for i, b := range fingerprintBytes { + if i > 0 { + fmt.Fprintf(&buf, ":") + } + fmt.Fprintf(&buf, "%02X", b) + } + return fmt.Sprintf("SHA256 Fingerprint=%s", buf.String()), nil +} + +func fingerprintFromFile(certFile string) (string, error) { + f, err := os.Open(certFile) + if err != nil { + return "", err + } + defer f.Close() + data, err := ioutil.ReadAll(f) + if err != nil { + return "", err + } + block, _ := pem.Decode(data) + if block == nil { + return "", fmt.Errorf("couldn't find PEM data in %s", certFile) + } + return fingerprintForCertificate(block.Bytes) +} diff --git a/client/lcd/certificates_test.go b/client/lcd/certificates_test.go new file mode 100644 index 000000000..14bddfa0f --- /dev/null +++ b/client/lcd/certificates_test.go @@ -0,0 +1,93 @@ +package lcd + +import ( + "crypto/ecdsa" + "crypto/x509" + "io/ioutil" + "os" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestGenerateSelfSignedCert(t *testing.T) { + host := "127.0.0.1,localhost,::1" + certBytes, _, err := generateSelfSignedCert(host) + require.Nil(t, err) + cert, err := x509.ParseCertificate(certBytes) + require.Nil(t, err) + require.Equal(t, 2, len(cert.IPAddresses)) + require.Equal(t, 1, len(cert.DNSNames)) + require.True(t, cert.IsCA) +} + +func TestWriteCertAndPrivKey(t *testing.T) { + expectedPerm := "-rw-------" + derBytes, priv, err := generateSelfSignedCert("localhost") + require.Nil(t, err) + type args struct { + certBytes []byte + priv *ecdsa.PrivateKey + } + tests := []struct { + name string + args args + wantErr bool + }{ + {"valid certificate", args{derBytes, priv}, false}, + {"garbage", args{[]byte("some garbage"), nil}, true}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotCertFile, gotKeyFile, err := writeCertAndPrivKey(tt.args.certBytes, tt.args.priv) + defer os.Remove(gotCertFile) + defer os.Remove(gotKeyFile) + if tt.wantErr { + require.NotNil(t, err) + return + } + require.Nil(t, err) + info, err := os.Stat(gotCertFile) + require.Nil(t, err) + require.True(t, info.Mode().IsRegular()) + require.Equal(t, expectedPerm, info.Mode().String()) + info, err = os.Stat(gotKeyFile) + require.Nil(t, err) + require.True(t, info.Mode().IsRegular()) + require.Equal(t, expectedPerm, info.Mode().String()) + }) + } +} + +func TestFingerprintFromFile(t *testing.T) { + cert := `-----BEGIN CERTIFICATE----- +MIIBbDCCARGgAwIBAgIQSuFKYv/22v+cxtVgMUrQADAKBggqhkjOPQQDAjASMRAw +DgYDVQQKEwdBY21lIENvMB4XDTE4MDkyMDIzNDQyNloXDTE5MDkyMDIzNDQyNlow +EjEQMA4GA1UEChMHQWNtZSBDbzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABDIo +ujAesRczcPVAWiLhpeV1B7hS/RI2LJaGj3QjyJ8hiUthJTPIamr8m7LuS/U5fS0o +hY297YeTIGo9YkxClICjSTBHMA4GA1UdDwEB/wQEAwICpDATBgNVHSUEDDAKBggr +BgEFBQcDATAPBgNVHRMBAf8EBTADAQH/MA8GA1UdEQQIMAaHBH8AAAEwCgYIKoZI +zj0EAwIDSQAwRgIhAKnwbhX9FrGG1otCVLwhClQ3RaLxnNpCgIGTqSimb34cAiEA +stMN+IqMCKWlZyGqxGIiyksMLMEU3lRqKNQn2EoAZJY= +-----END CERTIFICATE-----` + wantFingerprint := `SHA256 Fingerprint=0B:ED:9A:AA:A2:D1:7E:B2:53:56:F6:FC:C0:E6:1A:69:70:21:A2:B0:90:FC:AF:BB:EF:AE:2C:78:52:AB:68:40` + certFile, err := ioutil.TempFile("", "test_cert_") + require.Nil(t, err) + 
_, err = certFile.Write([]byte(cert)) + require.Nil(t, err) + err = certFile.Close() + require.Nil(t, err) + defer os.Remove(certFile.Name()) + fingerprint, err := fingerprintFromFile(certFile.Name()) + require.Nil(t, err) + require.Equal(t, wantFingerprint, fingerprint) + + // test failure + emptyFile, err := ioutil.TempFile("", "test_cert_") + require.Nil(t, err) + err = emptyFile.Close() + require.Nil(t, err) + defer os.Remove(emptyFile.Name()) + _, err = fingerprintFromFile(emptyFile.Name()) + require.NotNil(t, err) +} diff --git a/client/lcd/wire.go b/client/lcd/codec.go similarity index 100% rename from client/lcd/wire.go rename to client/lcd/codec.go diff --git a/client/lcd/lcd_test.go b/client/lcd/lcd_test.go index cbc1a2c20..8ad983326 100644 --- a/client/lcd/lcd_test.go +++ b/client/lcd/lcd_test.go @@ -4,41 +4,43 @@ import ( "encoding/hex" "fmt" "net/http" + "os" "regexp" + "strconv" "testing" "time" - "github.com/cosmos/cosmos-sdk/client/tx" - "github.com/spf13/viper" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - - cryptoKeys "github.com/cosmos/cosmos-sdk/crypto/keys" - p2p "github.com/tendermint/tendermint/p2p" + "github.com/tendermint/tendermint/p2p" ctypes "github.com/tendermint/tendermint/rpc/core/types" - client "github.com/cosmos/cosmos-sdk/client" - keys "github.com/cosmos/cosmos-sdk/client/keys" - rpc "github.com/cosmos/cosmos-sdk/client/rpc" - tests "github.com/cosmos/cosmos-sdk/tests" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/keys" + "github.com/cosmos/cosmos-sdk/client/rpc" + "github.com/cosmos/cosmos-sdk/client/tx" + "github.com/cosmos/cosmos-sdk/codec" + cryptoKeys "github.com/cosmos/cosmos-sdk/crypto/keys" + "github.com/cosmos/cosmos-sdk/crypto/keys/mintkey" + "github.com/cosmos/cosmos-sdk/tests" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/wire" + "github.com/cosmos/cosmos-sdk/version" "github.com/cosmos/cosmos-sdk/x/auth" + authrest "github.com/cosmos/cosmos-sdk/x/auth/client/rest" "github.com/cosmos/cosmos-sdk/x/gov" "github.com/cosmos/cosmos-sdk/x/slashing" "github.com/cosmos/cosmos-sdk/x/stake" - "github.com/cosmos/cosmos-sdk/x/stake/client/rest" ) func init() { - cryptoKeys.BcryptSecurityParameter = 1 + mintkey.BcryptSecurityParameter = 1 + version.Version = os.Getenv("VERSION") } func TestKeys(t *testing.T) { name, password := "test", "1234567890" addr, seed := CreateAddr(t, "test", password, GetKeyBase(t)) - cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) + cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) defer cleanup() // get seed @@ -50,40 +52,44 @@ func TestKeys(t *testing.T) { match := reg.MatchString(seed) require.True(t, match, "Returned seed has wrong format", seed) + // recover key + recoverName := "test_recovername" + recoverPassword := "1234567890" + doRecoverKey(t, port, recoverName, recoverPassword, seed) + newName := "test_newname" newPassword := "0987654321" - // add key jsonStr := []byte(fmt.Sprintf(`{"name":"%s", "password":"%s", "seed":"%s"}`, newName, newPassword, seed)) res, body = Request(t, port, "POST", "/keys", jsonStr) require.Equal(t, http.StatusOK, res.StatusCode, body) var resp keys.KeyOutput - err = wire.Cdc.UnmarshalJSON([]byte(body), &resp) + err = codec.Cdc.UnmarshalJSON([]byte(body), &resp) require.Nil(t, err, body) - addr2Bech32 := resp.Address.String() + addr2Bech32 := resp.Address _, err = sdk.AccAddressFromBech32(addr2Bech32) require.NoError(t, err, "Failed to return a correct bech32 
address") // test if created account is the correct account expectedInfo, _ := GetKeyBase(t).CreateKey(newName, seed, newPassword) expectedAccount := sdk.AccAddress(expectedInfo.GetPubKey().Address().Bytes()) - assert.Equal(t, expectedAccount.String(), addr2Bech32) + require.Equal(t, expectedAccount.String(), addr2Bech32) // existing keys res, body = Request(t, port, "GET", "/keys", nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - var m [2]keys.KeyOutput + var m [3]keys.KeyOutput err = cdc.UnmarshalJSON([]byte(body), &m) require.Nil(t, err) addrBech32 := addr.String() require.Equal(t, name, m[0].Name, "Did not serve keys name correctly") - require.Equal(t, addrBech32, m[0].Address.String(), "Did not serve keys Address correctly") + require.Equal(t, addrBech32, m[0].Address, "Did not serve keys Address correctly") require.Equal(t, newName, m[1].Name, "Did not serve keys name correctly") - require.Equal(t, addr2Bech32, m[1].Address.String(), "Did not serve keys Address correctly") + require.Equal(t, addr2Bech32, m[1].Address, "Did not serve keys Address correctly") // select key keyEndpoint := fmt.Sprintf("/keys/%s", newName) @@ -94,7 +100,7 @@ func TestKeys(t *testing.T) { require.Nil(t, err) require.Equal(t, newName, m2.Name, "Did not serve keys name correctly") - require.Equal(t, addr2Bech32, m2.Address.String(), "Did not serve keys Address correctly") + require.Equal(t, addr2Bech32, m2.Address, "Did not serve keys Address correctly") // update key jsonStr = []byte(fmt.Sprintf(`{ @@ -116,41 +122,46 @@ func TestKeys(t *testing.T) { } func TestVersion(t *testing.T) { - cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) + // skip the test if the VERSION environment variable has not been set + if version.Version == "" { + t.SkipNow() + } + + cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) defer cleanup() // node info res, body := Request(t, port, "GET", "/version", nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - reg, err := regexp.Compile(`\d+\.\d+\.\d+(-dev)?`) + reg, err := regexp.Compile(`\d+\.\d+\.\d+.*`) require.Nil(t, err) match := reg.MatchString(body) - require.True(t, match, body) + require.True(t, match, body, fmt.Sprintf("%s", body)) // node info res, body = Request(t, port, "GET", "/node_version", nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - reg, err = regexp.Compile(`\d+\.\d+\.\d+(-dev)?`) + reg, err = regexp.Compile(`\d+\.\d+\.\d+.*`) require.Nil(t, err) match = reg.MatchString(body) require.True(t, match, body) } func TestNodeStatus(t *testing.T) { - cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) + cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) defer cleanup() // node info res, body := Request(t, port, "GET", "/node_info", nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - var nodeInfo p2p.NodeInfo + var nodeInfo p2p.DefaultNodeInfo err := cdc.UnmarshalJSON([]byte(body), &nodeInfo) require.Nil(t, err, "Couldn't parse node info") - require.NotEqual(t, p2p.NodeInfo{}, nodeInfo, "res: %v", res) + require.NotEqual(t, p2p.DefaultNodeInfo{}, nodeInfo, "res: %v", res) // syncing res, body = Request(t, port, "GET", "/syncing", nil) @@ -161,7 +172,7 @@ func TestNodeStatus(t *testing.T) { } func TestBlock(t *testing.T) { - cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) + cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) defer cleanup() var resultBlock ctypes.ResultBlock @@ -176,10 +187,10 @@ func TestBlock(t *testing.T) { // -- - res, 
body = Request(t, port, "GET", "/blocks/1", nil) + res, body = Request(t, port, "GET", "/blocks/2", nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - err = wire.Cdc.UnmarshalJSON([]byte(body), &resultBlock) + err = codec.Cdc.UnmarshalJSON([]byte(body), &resultBlock) require.Nil(t, err, "Couldn't parse block") require.NotEqual(t, ctypes.ResultBlock{}, resultBlock) @@ -191,7 +202,7 @@ func TestBlock(t *testing.T) { } func TestValidators(t *testing.T) { - cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) + cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) defer cleanup() var resultVals rpc.ResultValidatorsOutput @@ -204,12 +215,12 @@ func TestValidators(t *testing.T) { require.NotEqual(t, rpc.ResultValidatorsOutput{}, resultVals) - require.Contains(t, resultVals.Validators[0].Address.String(), "cosmosvaladdr") - require.Contains(t, resultVals.Validators[0].PubKey, "cosmosvalpub") + require.Contains(t, resultVals.Validators[0].Address.String(), "cosmosvaloper") + require.Contains(t, resultVals.Validators[0].PubKey, "cosmosvalconspub") // -- - res, body = Request(t, port, "GET", "/validatorsets/1", nil) + res, body = Request(t, port, "GET", "/validatorsets/2", nil) require.Equal(t, http.StatusOK, res.StatusCode, body) err = cdc.UnmarshalJSON([]byte(body), &resultVals) @@ -226,7 +237,7 @@ func TestValidators(t *testing.T) { func TestCoinSend(t *testing.T) { name, password := "test", "1234567890" addr, seed := CreateAddr(t, "test", password, GetKeyBase(t)) - cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) + cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) defer cleanup() bz, err := hex.DecodeString("8FA6AB57AD6870F6B5B2E57735F38F2F30E73CB6") @@ -234,7 +245,7 @@ func TestCoinSend(t *testing.T) { someFakeAddr := sdk.AccAddress(bz) // query empty - res, body := Request(t, port, "GET", fmt.Sprintf("/accounts/%s", someFakeAddr), nil) + res, body := Request(t, port, "GET", fmt.Sprintf("/auth/accounts/%s", someFakeAddr), nil) require.Equal(t, http.StatusNoContent, res.StatusCode, body) acc := getAccount(t, port, addr) @@ -254,7 +265,7 @@ func TestCoinSend(t *testing.T) { mycoins := coins[0] require.Equal(t, "steak", mycoins.Denom) - require.Equal(t, initialBalance[0].Amount.SubRaw(1), mycoins.Amount) + require.Equal(t, initialBalance[0].Amount-1, mycoins.Amount) // query receiver acc = getAccount(t, port, receiveAddr) @@ -262,13 +273,13 @@ func TestCoinSend(t *testing.T) { mycoins = coins[0] require.Equal(t, "steak", mycoins.Denom) - require.Equal(t, int64(1), mycoins.Amount.Int64()) + require.Equal(t, int64(1), mycoins.Amount) } -func TestIBCTransfer(t *testing.T) { +func DisabledTestIBCTransfer(t *testing.T) { name, password := "test", "1234567890" addr, seed := CreateAddr(t, "test", password, GetKeyBase(t)) - cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) + cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) defer cleanup() acc := getAccount(t, port, addr) @@ -289,15 +300,73 @@ func TestIBCTransfer(t *testing.T) { mycoins := coins[0] require.Equal(t, "steak", mycoins.Denom) - require.Equal(t, initialBalance[0].Amount.SubRaw(1), mycoins.Amount) + require.Equal(t, initialBalance[0].Amount-1, mycoins.Amount) // TODO: query ibc egress packet state } +func TestCoinSendGenerateSignAndBroadcast(t *testing.T) { + name, password := "test", "1234567890" + addr, seed := CreateAddr(t, "test", password, GetKeyBase(t)) + cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) + 
defer cleanup() + acc := getAccount(t, port, addr) + + // generate TX + res, body, _ := doSendWithGas(t, port, seed, name, password, addr, "simulate", 0, "?generate_only=true") + + require.Equal(t, http.StatusOK, res.StatusCode, body) + var msg auth.StdTx + require.Nil(t, cdc.UnmarshalJSON([]byte(body), &msg)) + require.Equal(t, len(msg.Msgs), 1) + require.Equal(t, msg.Msgs[0].Route(), "bank") + require.Equal(t, msg.Msgs[0].GetSigners(), []sdk.AccAddress{addr}) + require.Equal(t, 0, len(msg.Signatures)) + + // sign tx + var signedMsg auth.StdTx + accnum := acc.GetAccountNumber() + sequence := acc.GetSequence() + + payload := authrest.SignBody{ + Tx: msg, + LocalAccountName: name, + Password: password, + ChainID: viper.GetString(client.FlagChainID), + AccountNumber: accnum, + Sequence: sequence, + } + json, err := cdc.MarshalJSON(payload) + require.Nil(t, err) + res, body = Request(t, port, "POST", "/tx/sign", json) + require.Equal(t, http.StatusOK, res.StatusCode, body) + require.Nil(t, cdc.UnmarshalJSON([]byte(body), &signedMsg)) + require.Equal(t, len(msg.Msgs), len(signedMsg.Msgs)) + require.Equal(t, msg.Msgs[0].Type(), signedMsg.Msgs[0].Type()) + require.Equal(t, msg.Msgs[0].GetSigners(), signedMsg.Msgs[0].GetSigners()) + require.Equal(t, 1, len(signedMsg.Signatures)) + + // broadcast tx + broadcastPayload := struct { + Tx auth.StdTx `json:"tx"` + Return string `json:"return"` + }{Tx: signedMsg, Return: "block"} + json, err = cdc.MarshalJSON(broadcastPayload) + require.Nil(t, err) + res, body = Request(t, port, "POST", "/tx/broadcast", json) + require.Equal(t, http.StatusOK, res.StatusCode, body) + + // check if tx was committed + var resultTx ctypes.ResultBroadcastTxCommit + require.Nil(t, cdc.UnmarshalJSON([]byte(body), &resultTx)) + require.Equal(t, uint32(0), resultTx.CheckTx.Code) + require.Equal(t, uint32(0), resultTx.DeliverTx.Code) +} + func TestTxs(t *testing.T) { name, password := "test", "1234567890" addr, seed := CreateAddr(t, "test", password, GetKeyBase(t)) - cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) + cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) defer cleanup() // query wrong @@ -305,7 +374,7 @@ func TestTxs(t *testing.T) { require.Equal(t, http.StatusBadRequest, res.StatusCode, body) // query empty - res, body = Request(t, port, "GET", fmt.Sprintf("/txs?tag=sender_bech32='%s'", "cosmosaccaddr1jawd35d9aq4u76sr3fjalmcqc8hqygs9gtnmv3"), nil) + res, body = Request(t, port, "GET", fmt.Sprintf("/txs?tag=sender_bech32='%s'", "cosmos1jawd35d9aq4u76sr3fjalmcqc8hqygs90d0g0v"), nil) require.Equal(t, http.StatusOK, res.StatusCode, body) require.Equal(t, "[]", body) @@ -353,324 +422,251 @@ func TestTxs(t *testing.T) { require.Equal(t, resultTx.Height, indexedTxs[0].Height) } +func TestPoolParamsQuery(t *testing.T) { + _, password := "test", "1234567890" + addr, _ := CreateAddr(t, "test", password, GetKeyBase(t)) + cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) + defer cleanup() + + defaultParams := stake.DefaultParams() + + res, body := Request(t, port, "GET", "/stake/parameters", nil) + require.Equal(t, http.StatusOK, res.StatusCode, body) + + var params stake.Params + err := cdc.UnmarshalJSON([]byte(body), ¶ms) + require.Nil(t, err) + require.True(t, defaultParams.Equal(params)) + + res, body = Request(t, port, "GET", "/stake/pool", nil) + require.Equal(t, http.StatusOK, res.StatusCode, body) + require.NotNil(t, body) + + initialPool := stake.InitialPool() + initialPool.LooseTokens = 
initialPool.LooseTokens.Add(sdk.NewDecWithPrec(100, 0)) + initialPool.BondedTokens = initialPool.BondedTokens.Add(sdk.NewDecWithPrec(100, 0)) // Delegate tx on GaiaAppGenState + initialPool.LooseTokens = initialPool.LooseTokens.Add(sdk.NewDecWithPrec(50, 0)) // freeFermionsAcc = 50 on GaiaAppGenState + + var pool stake.Pool + err = cdc.UnmarshalJSON([]byte(body), &pool) + require.Nil(t, err) + require.Equal(t, initialPool.BondedTokens, pool.BondedTokens) + require.Equal(t, initialPool.LooseTokens, pool.LooseTokens) +} + func TestValidatorsQuery(t *testing.T) { - cleanup, pks, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) + cleanup, valPubKeys, operAddrs, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) defer cleanup() - require.Equal(t, 1, len(pks)) + + require.Equal(t, 1, len(valPubKeys)) + require.Equal(t, 1, len(operAddrs)) validators := getValidators(t, port) - require.Equal(t, len(validators), 1) + require.Equal(t, 1, len(validators), fmt.Sprintf("%+v", validators)) - // make sure all the validators were found (order unknown because sorted by owner addr) + // make sure all the validators were found (order unknown because sorted by operator addr) foundVal := false - pkBech := sdk.MustBech32ifyValPub(pks[0]) - if validators[0].PubKey == pkBech { + + if validators[0].ConsPubKey == valPubKeys[0] { foundVal = true } - require.True(t, foundVal, "pkBech %v, owner %v", pkBech, validators[0].Owner) + + require.True(t, foundVal, "pk %v, operator %v", operAddrs[0], validators[0].OperatorAddr) } func TestValidatorQuery(t *testing.T) { - cleanup, pks, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) + cleanup, valPubKeys, operAddrs, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}) defer cleanup() - require.Equal(t, 1, len(pks)) + require.Equal(t, 1, len(valPubKeys)) + require.Equal(t, 1, len(operAddrs)) - validator1Owner := sdk.AccAddress(pks[0].Address()) - - validator := getValidator(t, port, validator1Owner) - bech32ValAddress, err := sdk.Bech32ifyValPub(pks[0]) - require.NoError(t, err) - assert.Equal(t, validator.PubKey, bech32ValAddress, "The returned validator does not hold the correct data") + validator := getValidator(t, port, operAddrs[0]) + require.Equal(t, validator.OperatorAddr, operAddrs[0], "The returned validator does not hold the correct data") } func TestBonding(t *testing.T) { name, password, denom := "test", "1234567890", "steak" - addr, seed := CreateAddr(t, "test", password, GetKeyBase(t)) - cleanup, pks, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) + addr, seed := CreateAddr(t, name, password, GetKeyBase(t)) + + cleanup, valPubKeys, operAddrs, port := InitializeTestLCD(t, 2, []sdk.AccAddress{addr}) defer cleanup() - validator1Owner := sdk.AccAddress(pks[0].Address()) + require.Equal(t, 2, len(valPubKeys)) + require.Equal(t, 2, len(operAddrs)) + + amt := sdk.NewDecWithoutFra(60) + validator := getValidator(t, port, operAddrs[0]) // create bond TX - resultTx := doDelegate(t, port, seed, name, password, addr, validator1Owner) + resultTx := doDelegate(t, port, seed, name, password, addr, operAddrs[0], sdk.NewDecWithoutFra(60).RawInt()) tests.WaitForHeight(resultTx.Height+1, port) - // check if tx was committed require.Equal(t, uint32(0), resultTx.CheckTx.Code) require.Equal(t, uint32(0), resultTx.DeliverTx.Code) - // query sender acc := getAccount(t, port, addr) coins := acc.GetCoins() - require.Equal(t, int64(40), coins.AmountOf(denom).Int64()) + require.Equal(t, sdk.NewDecWithoutFra(40).RawInt(), coins.AmountOf(denom)) // query validator - bond := 
getDelegation(t, port, addr, validator1Owner) - require.Equal(t, "60.0000000000", bond.Shares) + bond := getDelegation(t, port, addr, operAddrs[0]) + require.Equal(t, amt, bond.Shares) - ////////////////////// - // testing unbonding + delegatorDels := getDelegatorDelegations(t, port, addr) + require.Len(t, delegatorDels, 1) + require.Equal(t, amt, delegatorDels[0].Shares) - // create unbond TX - resultTx = doBeginUnbonding(t, port, seed, name, password, addr, validator1Owner) - tests.WaitForHeight(resultTx.Height+1, port) + bondedValidators := getDelegatorValidators(t, port, addr) + require.Len(t, bondedValidators, 1) + require.Equal(t, operAddrs[0], bondedValidators[0].OperatorAddr) + require.Equal(t, validator.DelegatorShares.Add(amt).String(), bondedValidators[0].DelegatorShares.String()) - // query validator - bond = getDelegation(t, port, addr, validator1Owner) - require.Equal(t, "30.0000000000", bond.Shares) + bondedValidator := getDelegatorValidator(t, port, addr, operAddrs[0]) + require.Equal(t, operAddrs[0], bondedValidator.OperatorAddr) + + // testing unbonding + resultTx = doBeginUnbonding(t, port, seed, name, password, addr, operAddrs[0], sdk.NewDecWithoutFra(30).RawInt()) + tests.WaitForHeight(resultTx.Height+1, port) - // check if tx was committed require.Equal(t, uint32(0), resultTx.CheckTx.Code) require.Equal(t, uint32(0), resultTx.DeliverTx.Code) - // should the sender should have not received any coins as the unbonding has only just begun - // query sender + // sender should have not received any coins as the unbonding has only just begun acc = getAccount(t, port, addr) coins = acc.GetCoins() - require.Equal(t, int64(40), coins.AmountOf("steak").Int64()) + require.Equal(t, sdk.NewDecWithoutFra(40).RawInt(), coins.AmountOf("steak")) + + unbonding := getUndelegation(t, port, addr, operAddrs[0]) + require.Equal(t, sdk.NewDecWithoutFra(30).RawInt(), unbonding.Balance.Amount) + + // test redelegation + resultTx = doBeginRedelegation(t, port, seed, name, password, addr, operAddrs[0], operAddrs[1], sdk.NewDecWithoutFra(30).RawInt()) + tests.WaitForHeight(resultTx.Height+1, port) + + require.Equal(t, uint32(0), resultTx.CheckTx.Code) + require.Equal(t, uint32(0), resultTx.DeliverTx.Code) - // query unbonding delegation - validatorAddr := sdk.AccAddress(pks[0].Address()) - unbondings := getUndelegations(t, port, addr, validatorAddr) - assert.Len(t, unbondings, 1, "Unbondings holds all unbonding-delegations") - assert.Equal(t, "30", unbondings[0].Balance.Amount.String()) + // query delegations, unbondings and redelegations from validator and delegator + delegatorDels = getDelegatorDelegations(t, port, addr) + require.Len(t, delegatorDels, 1) + require.Equal(t, "3000000000", delegatorDels[0].GetShares().String()) - // query summary - summary := getDelegationSummary(t, port, addr) + delegatorUbds := getDelegatorUnbondingDelegations(t, port, addr) + require.Len(t, delegatorUbds, 1) + require.Equal(t, sdk.NewDecWithoutFra(30).RawInt(), delegatorUbds[0].Balance.Amount) - assert.Len(t, summary.Delegations, 1, "Delegation summary holds all delegations") - assert.Equal(t, "30.0000000000", summary.Delegations[0].Shares) - assert.Len(t, summary.UnbondingDelegations, 1, "Delegation summary holds all unbonding-delegations") - assert.Equal(t, "30", summary.UnbondingDelegations[0].Balance.Amount.String()) + delegatorReds := getDelegatorRedelegations(t, port, addr) + require.Len(t, delegatorReds, 1) + require.Equal(t, sdk.NewDecWithoutFra(30).RawInt(), delegatorReds[0].Balance.Amount) - // TODO 
add redelegation, need more complex capabilities such to mock context and
- // TODO check summary for redelegation
- // assert.Len(t, summary.Redelegations, 1, "Delegation summary holds all redelegations")
+ validatorUbds := getValidatorUnbondingDelegations(t, port, operAddrs[0])
+ require.Len(t, validatorUbds, 1)
+ require.Equal(t, sdk.NewDecWithoutFra(30).RawInt(), validatorUbds[0].Balance.Amount)
+
+ validatorReds := getValidatorRedelegations(t, port, operAddrs[0])
+ require.Len(t, validatorReds, 1)
+ require.Equal(t, sdk.NewDecWithoutFra(30).RawInt(), validatorReds[0].Balance.Amount)
+
+ // TODO Unbonding status not currently implemented
+ // require.Equal(t, sdk.Unbonding, bondedValidators[0].Status)
// query txs
txs := getBondingTxs(t, port, addr, "")
- assert.Len(t, txs, 2, "All Txs found")
+ require.Len(t, txs, 3, "All Txs found")
txs = getBondingTxs(t, port, addr, "bond")
- assert.Len(t, txs, 1, "All bonding txs found")
+ require.Len(t, txs, 1, "All bonding txs found")
txs = getBondingTxs(t, port, addr, "unbond")
- assert.Len(t, txs, 1, "All unbonding txs found")
+ require.Len(t, txs, 1, "All unbonding txs found")
+
+ txs = getBondingTxs(t, port, addr, "redelegate")
+ require.Len(t, txs, 1, "All redelegation txs found")
}
func TestSubmitProposal(t *testing.T) {
name, password := "test", "1234567890"
addr, seed := CreateAddr(t, "test", password, GetKeyBase(t))
- cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr})
+ cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr})
defer cleanup()
// create SubmitProposal TX
- resultTx := doSubmitProposal(t, port, seed, name, password, addr)
+ resultTx := doSubmitProposal(t, port, seed, name, password, addr, 5)
tests.WaitForHeight(resultTx.Height+1, port)
// check if tx was committed
require.Equal(t, uint32(0), resultTx.CheckTx.Code)
require.Equal(t, uint32(0), resultTx.DeliverTx.Code)
- var proposalID int64
- cdc.UnmarshalBinaryBare(resultTx.DeliverTx.GetData(), &proposalID)
+ proposalID, _ := strconv.Atoi(string(resultTx.DeliverTx.GetData()))
// query proposal
- proposal := getProposal(t, port, proposalID)
+ proposal := getProposal(t, port, int64(proposalID))
require.Equal(t, "Test", proposal.GetTitle())
}
func TestDeposit(t *testing.T) {
name, password := "test", "1234567890"
addr, seed := CreateAddr(t, "test", password, GetKeyBase(t))
- cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr})
+ cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr})
defer cleanup()
// create SubmitProposal TX
- resultTx := doSubmitProposal(t, port, seed, name, password, addr)
+ resultTx := doSubmitProposal(t, port, seed, name, password, addr, 5)
tests.WaitForHeight(resultTx.Height+1, port)
// check if tx was committed
require.Equal(t, uint32(0), resultTx.CheckTx.Code)
require.Equal(t, uint32(0), resultTx.DeliverTx.Code)
- var proposalID int64
- cdc.UnmarshalBinaryBare(resultTx.DeliverTx.GetData(), &proposalID)
+ proposalID, _ := strconv.Atoi(string(resultTx.DeliverTx.GetData()))
// query proposal
- proposal := getProposal(t, port, proposalID)
+ proposal := getProposal(t, port, int64(proposalID))
require.Equal(t, "Test", proposal.GetTitle())
// create SubmitProposal TX
- resultTx = doDeposit(t, port, seed, name, password, addr, proposalID)
+ resultTx = doDeposit(t, port, seed, name, password, addr, int64(proposalID), 5)
tests.WaitForHeight(resultTx.Height+1, port)
// query proposal
- proposal = getProposal(t, port, proposalID)
- require.True(t, 
proposal.GetTotalDeposit().IsEqual(sdk.Coins{sdk.NewInt64Coin("steak", 10)})) + proposal = getProposal(t, port, int64(proposalID)) + require.True(t, proposal.GetTotalDeposit().IsEqual(sdk.Coins{sdk.NewCoin("steak", 10)})) // query deposit - deposit := getDeposit(t, port, proposalID, addr) - require.True(t, deposit.Amount.IsEqual(sdk.Coins{sdk.NewInt64Coin("steak", 10)})) -} - -func TestVote(t *testing.T) { - name, password := "test", "1234567890" - addr, seed := CreateAddr(t, "test", password, GetKeyBase(t)) - cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) - defer cleanup() - - // create SubmitProposal TX - resultTx := doSubmitProposal(t, port, seed, name, password, addr) - tests.WaitForHeight(resultTx.Height+1, port) - - // check if tx was committed - require.Equal(t, uint32(0), resultTx.CheckTx.Code) - require.Equal(t, uint32(0), resultTx.DeliverTx.Code) - - var proposalID int64 - cdc.UnmarshalBinaryBare(resultTx.DeliverTx.GetData(), &proposalID) - - // query proposal - proposal := getProposal(t, port, proposalID) - require.Equal(t, "Test", proposal.GetTitle()) - - // create SubmitProposal TX - resultTx = doDeposit(t, port, seed, name, password, addr, proposalID) - tests.WaitForHeight(resultTx.Height+1, port) - - // query proposal - proposal = getProposal(t, port, proposalID) - require.Equal(t, gov.StatusVotingPeriod, proposal.GetStatus()) - - // create SubmitProposal TX - resultTx = doVote(t, port, seed, name, password, addr, proposalID) - tests.WaitForHeight(resultTx.Height+1, port) - - vote := getVote(t, port, proposalID, addr) - require.Equal(t, proposalID, vote.ProposalID) - require.Equal(t, gov.OptionYes, vote.Option) + deposit := getDeposit(t, port, int64(proposalID), addr) + require.True(t, deposit.Amount.IsEqual(sdk.Coins{sdk.NewCoin("steak", 10)})) } -func TestUnrevoke(t *testing.T) { +func TestUnjail(t *testing.T) { _, password := "test", "1234567890" addr, _ := CreateAddr(t, "test", password, GetKeyBase(t)) - cleanup, pks, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) + cleanup, valPubKeys, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}) defer cleanup() // XXX: any less than this and it fails tests.WaitForHeight(3, port) - pkString, _ := sdk.Bech32ifyValPub(pks[0]) + pkString, _ := sdk.Bech32ifyConsPub(valPubKeys[0]) signingInfo := getSigningInfo(t, port, pkString) tests.WaitForHeight(4, port) require.Equal(t, true, signingInfo.IndexOffset > 0) require.Equal(t, time.Unix(0, 0).UTC(), signingInfo.JailedUntil) - require.Equal(t, true, signingInfo.SignedBlocksCounter > 0) -} - -func TestProposalsQuery(t *testing.T) { - name, password1 := "test", "1234567890" - name2, password2 := "test2", "1234567890" - addr, seed := CreateAddr(t, "test", password1, GetKeyBase(t)) - addr2, seed2 := CreateAddr(t, "test2", password2, GetKeyBase(t)) - cleanup, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr, addr2}) - defer cleanup() - - // Addr1 proposes (and deposits) proposals #1 and #2 - resultTx := doSubmitProposal(t, port, seed, name, password1, addr) - var proposalID1 int64 - cdc.UnmarshalBinaryBare(resultTx.DeliverTx.GetData(), &proposalID1) - tests.WaitForHeight(resultTx.Height+1, port) - resultTx = doSubmitProposal(t, port, seed, name, password1, addr) - var proposalID2 int64 - cdc.UnmarshalBinaryBare(resultTx.DeliverTx.GetData(), &proposalID2) - tests.WaitForHeight(resultTx.Height+1, port) - - // Addr2 proposes (and deposits) proposals #3 - resultTx = doSubmitProposal(t, port, seed2, name2, password2, addr2) - var proposalID3 int64 - 
cdc.UnmarshalBinaryBare(resultTx.DeliverTx.GetData(), &proposalID3) - tests.WaitForHeight(resultTx.Height+1, port) - - // Addr2 deposits on proposals #2 & #3 - resultTx = doDeposit(t, port, seed2, name2, password2, addr2, proposalID2) - tests.WaitForHeight(resultTx.Height+1, port) - resultTx = doDeposit(t, port, seed2, name2, password2, addr2, proposalID3) - tests.WaitForHeight(resultTx.Height+1, port) - - // Only proposals #1 should be in Deposit Period - proposals := getProposalsFilterStatus(t, port, gov.StatusDepositPeriod) - require.Len(t, proposals, 1) - require.Equal(t, proposalID1, proposals[0].GetProposalID()) - // Only proposals #2 and #3 should be in Voting Period - proposals = getProposalsFilterStatus(t, port, gov.StatusVotingPeriod) - require.Len(t, proposals, 2) - require.Equal(t, proposalID2, proposals[0].GetProposalID()) - require.Equal(t, proposalID3, proposals[1].GetProposalID()) - - // Addr1 votes on proposals #2 & #3 - resultTx = doVote(t, port, seed, name, password1, addr, proposalID2) - tests.WaitForHeight(resultTx.Height+1, port) - resultTx = doVote(t, port, seed, name, password1, addr, proposalID3) - tests.WaitForHeight(resultTx.Height+1, port) - - // Addr2 votes on proposal #3 - resultTx = doVote(t, port, seed2, name2, password2, addr2, proposalID3) - tests.WaitForHeight(resultTx.Height+1, port) - - // Test query all proposals - proposals = getProposalsAll(t, port) - require.Equal(t, proposalID1, (proposals[0]).GetProposalID()) - require.Equal(t, proposalID2, (proposals[1]).GetProposalID()) - require.Equal(t, proposalID3, (proposals[2]).GetProposalID()) - - // Test query deposited by addr1 - proposals = getProposalsFilterDepositer(t, port, addr) - require.Equal(t, proposalID1, (proposals[0]).GetProposalID()) - - // Test query deposited by addr2 - proposals = getProposalsFilterDepositer(t, port, addr2) - require.Equal(t, proposalID2, (proposals[0]).GetProposalID()) - require.Equal(t, proposalID3, (proposals[1]).GetProposalID()) - - // Test query voted by addr1 - proposals = getProposalsFilterVoter(t, port, addr) - require.Equal(t, proposalID2, (proposals[0]).GetProposalID()) - require.Equal(t, proposalID3, (proposals[1]).GetProposalID()) - - // Test query voted by addr2 - proposals = getProposalsFilterVoter(t, port, addr2) - require.Equal(t, proposalID3, (proposals[0]).GetProposalID()) - - // Test query voted and deposited by addr1 - proposals = getProposalsFilterVoterDepositer(t, port, addr, addr) - require.Equal(t, proposalID2, (proposals[0]).GetProposalID()) - - // Test query votes on Proposal 2 - votes := getVotes(t, port, proposalID2) - require.Len(t, votes, 1) - require.Equal(t, addr, votes[0].Voter) - - // Test query votes on Proposal 3 - votes = getVotes(t, port, proposalID3) - require.Len(t, votes, 2) - require.True(t, addr.String() == votes[0].Voter.String() || addr.String() == votes[1].Voter.String()) - require.True(t, addr2.String() == votes[0].Voter.String() || addr2.String() == votes[1].Voter.String()) + require.Equal(t, true, signingInfo.MissedBlocksCounter == 0) } //_____________________________________________________________________________ // get the account to get the sequence -func getAccount(t *testing.T, port string, addr sdk.AccAddress) auth.Account { - res, body := Request(t, port, "GET", fmt.Sprintf("/accounts/%s", addr), nil) +func getAccount(t *testing.T, port string, addr sdk.AccAddress) sdk.Account { + res, body := Request(t, port, "GET", fmt.Sprintf("/auth/accounts/%s", addr), nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - 
var acc auth.Account + var acc sdk.Account err := cdc.UnmarshalJSON([]byte(body), &acc) require.Nil(t, err) return acc } -func doSend(t *testing.T, port, seed, name, password string, addr sdk.AccAddress) (receiveAddr sdk.AccAddress, resultTx ctypes.ResultBroadcastTxCommit) { +func doSendWithGas(t *testing.T, port, seed, name, password string, addr sdk.AccAddress, gas string, gasAdjustment float64, queryStr string) (res *http.Response, body string, receiveAddr sdk.AccAddress) { // create receive address kb := client.MockKeyBase() @@ -683,24 +679,58 @@ func doSend(t *testing.T, port, seed, name, password string, addr sdk.AccAddress sequence := acc.GetSequence() chainID := viper.GetString(client.FlagChainID) // send - coinbz, err := cdc.MarshalJSON(sdk.NewInt64Coin("steak", 1)) + coinbz, err := cdc.MarshalJSON(sdk.NewCoin("steak", 1)) if err != nil { panic(err) } + gasStr := "" + if len(gas) != 0 { + gasStr = fmt.Sprintf(` + "gas":%q, + `, gas) + } + gasAdjustmentStr := "" + if gasAdjustment > 0 { + gasAdjustmentStr = fmt.Sprintf(` + "gas_adjustment":"%v", + `, gasAdjustment) + } jsonStr := []byte(fmt.Sprintf(`{ - "name":"%s", - "password":"%s", - "account_number":"%d", - "sequence":"%d", - "gas": "10000", "amount":[%s], - "chain_id":"%s" - }`, name, password, accnum, sequence, coinbz, chainID)) - res, body := Request(t, port, "POST", fmt.Sprintf("/accounts/%s/send", receiveAddr), jsonStr) + "base_req": { + %v%v + "name": "%s", + "password": "%s", + "chain_id": "%s", + "account_number":"%d", + "sequence":"%d" + } + }`, coinbz, gasStr, gasAdjustmentStr, name, password, chainID, accnum, sequence)) + + res, body = Request(t, port, "POST", fmt.Sprintf("/bank/accounts/%s/transfers%v", receiveAddr, queryStr), jsonStr) + return +} + +func doRecoverKey(t *testing.T, port, recoverName, recoverPassword, seed string) { + jsonStr := []byte(fmt.Sprintf(`{"password":"%s", "seed":"%s"}`, recoverPassword, seed)) + res, body := Request(t, port, "POST", fmt.Sprintf("/keys/%s/recover", recoverName), jsonStr) + require.Equal(t, http.StatusOK, res.StatusCode, body) + var resp keys.KeyOutput + err := codec.Cdc.UnmarshalJSON([]byte(body), &resp) + require.Nil(t, err, body) - err = cdc.UnmarshalJSON([]byte(body), &resultTx) + addr1Bech32 := resp.Address + _, err = sdk.AccAddressFromBech32(addr1Bech32) + require.NoError(t, err, "Failed to return a correct bech32 address") +} + +func doSend(t *testing.T, port, seed, name, password string, addr sdk.AccAddress) (receiveAddr sdk.AccAddress, resultTx ctypes.ResultBroadcastTxCommit) { + res, body, receiveAddr := doSendWithGas(t, port, seed, name, password, addr, "", 0, "") + require.Equal(t, http.StatusOK, res.StatusCode, body) + + err := cdc.UnmarshalJSON([]byte(body), &resultTx) require.Nil(t, err) return receiveAddr, resultTx @@ -722,19 +752,20 @@ func doIBCTransfer(t *testing.T, port, seed, name, password string, addr sdk.Acc // send jsonStr := []byte(fmt.Sprintf(`{ - "name":"%s", - "password": "%s", - "account_number":"%d", - "sequence": "%d", - "gas": "100000", - "src_chain_id": "%s", "amount":[ { "denom": "%s", "amount": "1" } - ] - }`, name, password, accnum, sequence, chainID, "steak")) + ], + "base_req": { + "name": "%s", + "password": "%s", + "chain_id": "%s", + "account_number":"%d", + "sequence":"%d" + } + }`, "steak", name, password, chainID, accnum, sequence)) res, body := Request(t, port, "POST", fmt.Sprintf("/ibc/testchain/%s/send", receiveAddr), jsonStr) require.Equal(t, http.StatusOK, res.StatusCode, body) @@ -746,95 +777,146 @@ func doIBCTransfer(t 
*testing.T, port, seed, name, password string, addr sdk.Acc } func getSigningInfo(t *testing.T, port string, validatorPubKey string) slashing.ValidatorSigningInfo { - res, body := Request(t, port, "GET", fmt.Sprintf("/slashing/signing_info/%s", validatorPubKey), nil) + res, body := Request(t, port, "GET", fmt.Sprintf("/slashing/validators/%s/signing_info", validatorPubKey), nil) require.Equal(t, http.StatusOK, res.StatusCode, body) + var signingInfo slashing.ValidatorSigningInfo err := cdc.UnmarshalJSON([]byte(body), &signingInfo) require.Nil(t, err) + return signingInfo } // ============= Stake Module ================ -func getDelegation(t *testing.T, port string, delegatorAddr, validatorAddr sdk.AccAddress) rest.DelegationWithoutRat { - - // get the account to get the sequence +func getDelegation(t *testing.T, port string, delegatorAddr sdk.AccAddress, validatorAddr sdk.ValAddress) stake.Delegation { res, body := Request(t, port, "GET", fmt.Sprintf("/stake/delegators/%s/delegations/%s", delegatorAddr, validatorAddr), nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - var bond rest.DelegationWithoutRat + + var bond stake.Delegation err := cdc.UnmarshalJSON([]byte(body), &bond) require.Nil(t, err) + return bond } -func getUndelegations(t *testing.T, port string, delegatorAddr, validatorAddr sdk.AccAddress) []stake.UnbondingDelegation { - - // get the account to get the sequence +func getUndelegation(t *testing.T, port string, delegatorAddr sdk.AccAddress, validatorAddr sdk.ValAddress) stake.UnbondingDelegation { res, body := Request(t, port, "GET", fmt.Sprintf("/stake/delegators/%s/unbonding_delegations/%s", delegatorAddr, validatorAddr), nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - var unbondings []stake.UnbondingDelegation - err := cdc.UnmarshalJSON([]byte(body), &unbondings) + + var unbond stake.UnbondingDelegation + err := cdc.UnmarshalJSON([]byte(body), &unbond) + require.Nil(t, err) + + return unbond +} + +func getDelegatorDelegations(t *testing.T, port string, delegatorAddr sdk.AccAddress) []stake.Delegation { + res, body := Request(t, port, "GET", fmt.Sprintf("/stake/delegators/%s/delegations", delegatorAddr), nil) + require.Equal(t, http.StatusOK, res.StatusCode, body) + + var dels []stake.Delegation + + err := cdc.UnmarshalJSON([]byte(body), &dels) require.Nil(t, err) - return unbondings + + return dels } -func getDelegationSummary(t *testing.T, port string, delegatorAddr sdk.AccAddress) rest.DelegationSummary { +func getDelegatorUnbondingDelegations(t *testing.T, port string, delegatorAddr sdk.AccAddress) []stake.UnbondingDelegation { + res, body := Request(t, port, "GET", fmt.Sprintf("/stake/delegators/%s/unbonding_delegations", delegatorAddr), nil) + require.Equal(t, http.StatusOK, res.StatusCode, body) - // get the account to get the sequence - res, body := Request(t, port, "GET", fmt.Sprintf("/stake/delegators/%s", delegatorAddr), nil) + var ubds []stake.UnbondingDelegation + + err := cdc.UnmarshalJSON([]byte(body), &ubds) + require.Nil(t, err) + + return ubds +} + +func getDelegatorRedelegations(t *testing.T, port string, delegatorAddr sdk.AccAddress) []stake.Redelegation { + res, body := Request(t, port, "GET", fmt.Sprintf("/stake/delegators/%s/redelegations", delegatorAddr), nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - var summary rest.DelegationSummary - err := cdc.UnmarshalJSON([]byte(body), &summary) + + var reds []stake.Redelegation + + err := cdc.UnmarshalJSON([]byte(body), &reds) require.Nil(t, err) - return summary + + 
return reds } func getBondingTxs(t *testing.T, port string, delegatorAddr sdk.AccAddress, query string) []tx.Info { - - // get the account to get the sequence var res *http.Response var body string + if len(query) > 0 { res, body = Request(t, port, "GET", fmt.Sprintf("/stake/delegators/%s/txs?type=%s", delegatorAddr, query), nil) } else { res, body = Request(t, port, "GET", fmt.Sprintf("/stake/delegators/%s/txs", delegatorAddr), nil) } require.Equal(t, http.StatusOK, res.StatusCode, body) + var txs []tx.Info + err := cdc.UnmarshalJSON([]byte(body), &txs) require.Nil(t, err) + return txs } -func doDelegate(t *testing.T, port, seed, name, password string, delegatorAddr, validatorAddr sdk.AccAddress) (resultTx ctypes.ResultBroadcastTxCommit) { - // get the account to get the sequence - acc := getAccount(t, port, delegatorAddr) +func getDelegatorValidators(t *testing.T, port string, delegatorAddr sdk.AccAddress) []stake.Validator { + res, body := Request(t, port, "GET", fmt.Sprintf("/stake/delegators/%s/validators", delegatorAddr), nil) + require.Equal(t, http.StatusOK, res.StatusCode, body) + + var bondedValidators []stake.Validator + + err := cdc.UnmarshalJSON([]byte(body), &bondedValidators) + require.Nil(t, err) + + return bondedValidators +} + +func getDelegatorValidator(t *testing.T, port string, delegatorAddr sdk.AccAddress, validatorAddr sdk.ValAddress) stake.Validator { + res, body := Request(t, port, "GET", fmt.Sprintf("/stake/delegators/%s/validators/%s", delegatorAddr, validatorAddr), nil) + require.Equal(t, http.StatusOK, res.StatusCode, body) + + var bondedValidator stake.Validator + err := cdc.UnmarshalJSON([]byte(body), &bondedValidator) + require.Nil(t, err) + + return bondedValidator +} + +func doDelegate(t *testing.T, port, seed, name, password string, + delAddr sdk.AccAddress, valAddr sdk.ValAddress, amount int64) (resultTx ctypes.ResultBroadcastTxCommit) { + + acc := getAccount(t, port, delAddr) accnum := acc.GetAccountNumber() sequence := acc.GetSequence() - chainID := viper.GetString(client.FlagChainID) - // send jsonStr := []byte(fmt.Sprintf(`{ - "name": "%s", - "password": "%s", - "account_number": "%d", - "sequence": "%d", - "gas": "10000", - "chain_id": "%s", "delegations": [ { "delegator_addr": "%s", "validator_addr": "%s", - "delegation": { "denom": "%s", "amount": "60" } + "delegation": { "denom": "%s", "amount": "%d" } } ], "begin_unbondings": [], - "complete_unbondings": [], "begin_redelegates": [], - "complete_redelegates": [] - }`, name, password, accnum, sequence, chainID, delegatorAddr, validatorAddr, "steak")) - res, body := Request(t, port, "POST", fmt.Sprintf("/stake/delegators/%s/delegations", delegatorAddr), jsonStr) + "base_req": { + "name": "%s", + "password": "%s", + "chain_id": "%s", + "account_number":"%d", + "sequence":"%d" + } + }`, delAddr, valAddr, "steak", amount, name, password, chainID, accnum, sequence)) + + res, body := Request(t, port, "POST", fmt.Sprintf("/stake/delegators/%s/delegations", delAddr), jsonStr) require.Equal(t, http.StatusOK, res.StatusCode, body) var results []ctypes.ResultBroadcastTxCommit @@ -845,36 +927,33 @@ func doDelegate(t *testing.T, port, seed, name, password string, delegatorAddr, } func doBeginUnbonding(t *testing.T, port, seed, name, password string, - delegatorAddr, validatorAddr sdk.AccAddress) (resultTx ctypes.ResultBroadcastTxCommit) { + delAddr sdk.AccAddress, valAddr sdk.ValAddress, amount int64) (resultTx ctypes.ResultBroadcastTxCommit) { - // get the account to get the sequence - acc := getAccount(t, 
port, delegatorAddr) + acc := getAccount(t, port, delAddr) accnum := acc.GetAccountNumber() sequence := acc.GetSequence() - chainID := viper.GetString(client.FlagChainID) - // send jsonStr := []byte(fmt.Sprintf(`{ - "name": "%s", - "password": "%s", - "account_number": "%d", - "sequence": "%d", - "gas": "20000", - "chain_id": "%s", "delegations": [], "begin_unbondings": [ { "delegator_addr": "%s", "validator_addr": "%s", - "shares": "30" + "shares": "%d" } ], - "complete_unbondings": [], "begin_redelegates": [], - "complete_redelegates": [] - }`, name, password, accnum, sequence, chainID, delegatorAddr, validatorAddr)) - res, body := Request(t, port, "POST", fmt.Sprintf("/stake/delegators/%s/delegations", delegatorAddr), jsonStr) + "base_req": { + "name": "%s", + "password": "%s", + "chain_id": "%s", + "account_number":"%d", + "sequence":"%d" + } + }`, delAddr, valAddr, amount, name, password, chainID, accnum, sequence)) + + res, body := Request(t, port, "POST", fmt.Sprintf("/stake/delegators/%s/delegations", delAddr), jsonStr) require.Equal(t, http.StatusOK, res.StatusCode, body) var results []ctypes.ResultBroadcastTxCommit @@ -885,37 +964,35 @@ func doBeginUnbonding(t *testing.T, port, seed, name, password string, } func doBeginRedelegation(t *testing.T, port, seed, name, password string, - delegatorAddr, validatorSrcAddr, validatorDstAddr sdk.AccAddress) (resultTx ctypes.ResultBroadcastTxCommit) { + delAddr sdk.AccAddress, valSrcAddr, valDstAddr sdk.ValAddress, amount int64) (resultTx ctypes.ResultBroadcastTxCommit) { - // get the account to get the sequence - acc := getAccount(t, port, delegatorAddr) + acc := getAccount(t, port, delAddr) accnum := acc.GetAccountNumber() sequence := acc.GetSequence() chainID := viper.GetString(client.FlagChainID) - // send jsonStr := []byte(fmt.Sprintf(`{ - "name": "%s", - "password": "%s", - "account_number": "%d", - "sequence": "%d", - "gas": "10000", - "chain_id": "%s", "delegations": [], "begin_unbondings": [], - "complete_unbondings": [], "begin_redelegates": [ { "delegator_addr": "%s", "validator_src_addr": "%s", "validator_dst_addr": "%s", - "shares": "30" + "shares": "%d" } ], - "complete_redelegates": [] - }`, name, password, accnum, sequence, chainID, delegatorAddr, validatorSrcAddr, validatorDstAddr)) - res, body := Request(t, port, "POST", fmt.Sprintf("/stake/delegators/%s/delegations", delegatorAddr), jsonStr) + "base_req": { + "name": "%s", + "password": "%s", + "chain_id": "%s", + "account_number":"%d", + "sequence":"%d" + } + }`, delAddr, valSrcAddr, valDstAddr, amount, name, password, chainID, accnum, sequence)) + + res, body := Request(t, port, "POST", fmt.Sprintf("/stake/delegators/%s/delegations", delAddr), jsonStr) require.Equal(t, http.StatusOK, res.StatusCode, body) var results []ctypes.ResultBroadcastTxCommit @@ -925,26 +1002,50 @@ func doBeginRedelegation(t *testing.T, port, seed, name, password string, return results[0] } -func getValidators(t *testing.T, port string) []stake.BechValidator { - // get the account to get the sequence +func getValidators(t *testing.T, port string) []stake.Validator { res, body := Request(t, port, "GET", "/stake/validators", nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - var validators []stake.BechValidator + + var validators []stake.Validator err := cdc.UnmarshalJSON([]byte(body), &validators) require.Nil(t, err) + return validators } -func getValidator(t *testing.T, port string, validatorAddr sdk.AccAddress) stake.BechValidator { - // get the account to get the sequence +func 
getValidator(t *testing.T, port string, validatorAddr sdk.ValAddress) stake.Validator { res, body := Request(t, port, "GET", fmt.Sprintf("/stake/validators/%s", validatorAddr.String()), nil) require.Equal(t, http.StatusOK, res.StatusCode, body) - var validator stake.BechValidator + + var validator stake.Validator err := cdc.UnmarshalJSON([]byte(body), &validator) require.Nil(t, err) + return validator } +func getValidatorUnbondingDelegations(t *testing.T, port string, validatorAddr sdk.ValAddress) []stake.UnbondingDelegation { + res, body := Request(t, port, "GET", fmt.Sprintf("/stake/validators/%s/unbonding_delegations", validatorAddr.String()), nil) + require.Equal(t, http.StatusOK, res.StatusCode, body) + + var ubds []stake.UnbondingDelegation + err := cdc.UnmarshalJSON([]byte(body), &ubds) + require.Nil(t, err) + + return ubds +} + +func getValidatorRedelegations(t *testing.T, port string, validatorAddr sdk.ValAddress) []stake.Redelegation { + res, body := Request(t, port, "GET", fmt.Sprintf("/stake/validators/%s/redelegations", validatorAddr.String()), nil) + require.Equal(t, http.StatusOK, res.StatusCode, body) + + var reds []stake.Redelegation + err := cdc.UnmarshalJSON([]byte(body), &reds) + require.Nil(t, err) + + return reds +} + // ============= Governance Module ================ func getProposal(t *testing.T, port string, proposalID int64) gov.Proposal { @@ -956,6 +1057,15 @@ func getProposal(t *testing.T, port string, proposalID int64) gov.Proposal { return proposal } +func getDeposits(t *testing.T, port string, proposalID int64) []gov.Deposit { + res, body := Request(t, port, "GET", fmt.Sprintf("/gov/proposals/%d/deposits", proposalID), nil) + require.Equal(t, http.StatusOK, res.StatusCode, body) + var deposits []gov.Deposit + err := cdc.UnmarshalJSON([]byte(body), &deposits) + require.Nil(t, err) + return deposits +} + func getDeposit(t *testing.T, port string, proposalID int64, depositerAddr sdk.AccAddress) gov.Deposit { res, body := Request(t, port, "GET", fmt.Sprintf("/gov/proposals/%d/deposits/%s", proposalID, depositerAddr), nil) require.Equal(t, http.StatusOK, res.StatusCode, body) @@ -1033,8 +1143,8 @@ func getProposalsFilterStatus(t *testing.T, port string, status gov.ProposalStat return proposals } -func doSubmitProposal(t *testing.T, port, seed, name, password string, proposerAddr sdk.AccAddress) (resultTx ctypes.ResultBroadcastTxCommit) { - // get the account to get the sequence +func doSubmitProposal(t *testing.T, port, seed, name, password string, proposerAddr sdk.AccAddress, amount int64) (resultTx ctypes.ResultBroadcastTxCommit) { + acc := getAccount(t, port, proposerAddr) accnum := acc.GetAccountNumber() sequence := acc.GetSequence() @@ -1047,16 +1157,15 @@ func doSubmitProposal(t *testing.T, port, seed, name, password string, proposerA "description": "test", "proposal_type": "Text", "proposer": "%s", - "initial_deposit": [{ "denom": "steak", "amount": "5" }], + "initial_deposit": [{ "denom": "steak", "amount": "%d" }], "base_req": { "name": "%s", "password": "%s", "chain_id": "%s", "account_number":"%d", - "sequence":"%d", - "gas":"100000" + "sequence":"%d" } - }`, proposerAddr, name, password, chainID, accnum, sequence)) + }`, proposerAddr, amount, name, password, chainID, accnum, sequence)) res, body := Request(t, port, "POST", "/gov/proposals", jsonStr) require.Equal(t, http.StatusOK, res.StatusCode, body) @@ -1067,8 +1176,8 @@ func doSubmitProposal(t *testing.T, port, seed, name, password string, proposerA return results } -func doDeposit(t *testing.T, 
port, seed, name, password string, proposerAddr sdk.AccAddress, proposalID int64) (resultTx ctypes.ResultBroadcastTxCommit) { - // get the account to get the sequence +func doDeposit(t *testing.T, port, seed, name, password string, proposerAddr sdk.AccAddress, proposalID int64, amount int64) (resultTx ctypes.ResultBroadcastTxCommit) { + acc := getAccount(t, port, proposerAddr) accnum := acc.GetAccountNumber() sequence := acc.GetSequence() @@ -1078,16 +1187,15 @@ func doDeposit(t *testing.T, port, seed, name, password string, proposerAddr sdk // deposit on proposal jsonStr := []byte(fmt.Sprintf(`{ "depositer": "%s", - "amount": [{ "denom": "steak", "amount": "5" }], + "amount": [{ "denom": "steak", "amount": "%d" }], "base_req": { "name": "%s", "password": "%s", "chain_id": "%s", "account_number":"%d", - "sequence": "%d", - "gas":"100000" + "sequence": "%d" } - }`, proposerAddr, name, password, chainID, accnum, sequence)) + }`, proposerAddr, amount, name, password, chainID, accnum, sequence)) res, body := Request(t, port, "POST", fmt.Sprintf("/gov/proposals/%d/deposits", proposalID), jsonStr) require.Equal(t, http.StatusOK, res.StatusCode, body) @@ -1115,12 +1223,10 @@ func doVote(t *testing.T, port, seed, name, password string, proposerAddr sdk.Ac "password": "%s", "chain_id": "%s", "account_number": "%d", - "sequence": "%d", - "gas":"100000" + "sequence": "%d" } }`, proposerAddr, name, password, chainID, accnum, sequence)) res, body := Request(t, port, "POST", fmt.Sprintf("/gov/proposals/%d/votes", proposalID), jsonStr) - fmt.Println(res) require.Equal(t, http.StatusOK, res.StatusCode, body) var results ctypes.ResultBroadcastTxCommit diff --git a/client/lcd/root.go b/client/lcd/root.go index bfa62f1cf..10faadd1a 100644 --- a/client/lcd/root.go +++ b/client/lcd/root.go @@ -1,22 +1,24 @@ package lcd import ( + "errors" + "net" "net/http" "os" - client "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/context" - keys "github.com/cosmos/cosmos-sdk/client/keys" - rpc "github.com/cosmos/cosmos-sdk/client/rpc" - tx "github.com/cosmos/cosmos-sdk/client/tx" - "github.com/cosmos/cosmos-sdk/wire" + "github.com/cosmos/cosmos-sdk/client/keys" + "github.com/cosmos/cosmos-sdk/client/rpc" + "github.com/cosmos/cosmos-sdk/client/tx" + "github.com/cosmos/cosmos-sdk/codec" auth "github.com/cosmos/cosmos-sdk/x/auth/client/rest" bank "github.com/cosmos/cosmos-sdk/x/bank/client/rest" gov "github.com/cosmos/cosmos-sdk/x/gov/client/rest" - ibc "github.com/cosmos/cosmos-sdk/x/ibc/client/rest" slashing "github.com/cosmos/cosmos-sdk/x/slashing/client/rest" stake "github.com/cosmos/cosmos-sdk/x/stake/client/rest" "github.com/gorilla/mux" + "github.com/rakyll/statik/fs" "github.com/spf13/cobra" "github.com/spf13/viper" cmn "github.com/tendermint/tendermint/libs/common" @@ -24,35 +26,87 @@ import ( tmserver "github.com/tendermint/tendermint/rpc/lib/server" ) +const ( + flagListenAddr = "laddr" + flagCORS = "cors" + flagMaxOpenConnections = "max-open" + flagInsecure = "insecure" + flagSSLHosts = "ssl-hosts" + flagSSLCertFile = "ssl-certfile" + flagSSLKeyFile = "ssl-keyfile" +) + // ServeCommand will generate a long-running rest server // (aka Light Client Daemon) that exposes functionality similar // to the cli, but over rest -func ServeCommand(cdc *wire.Codec) *cobra.Command { - flagListenAddr := "laddr" - flagCORS := "cors" - flagMaxOpenConnections := "max-open" +func ServeCommand(cdc *codec.Codec) *cobra.Command { cmd := &cobra.Command{ Use: 
"rest-server", Short: "Start LCD (light-client daemon), a local REST server", - RunE: func(cmd *cobra.Command, args []string) error { + RunE: func(cmd *cobra.Command, args []string) (err error) { listenAddr := viper.GetString(flagListenAddr) handler := createHandler(cdc) + registerSwaggerUI(handler) logger := log.NewTMLogger(log.NewSyncWriter(os.Stdout)).With("module", "rest-server") maxOpen := viper.GetInt(flagMaxOpenConnections) - - listener, err := tmserver.StartHTTPServer( - listenAddr, handler, logger, - tmserver.Config{MaxOpenConnections: maxOpen}, - ) - if err != nil { - return err + sslHosts := viper.GetString(flagSSLHosts) + certFile := viper.GetString(flagSSLCertFile) + keyFile := viper.GetString(flagSSLKeyFile) + cleanupFunc := func() {} + + var listener net.Listener + var fingerprint string + if viper.GetBool(flagInsecure) { + listener, err := tmserver.Listen(listenAddr, tmserver.Config{MaxOpenConnections: maxOpen}) + if err != nil { + return err + } + go func() { + if err = tmserver.StartHTTPServer(listener, handler, logger); err != nil { + panic(err) + } + }() + } else { + if certFile != "" { + // validateCertKeyFiles() is needed to work around tendermint/tendermint#2460 + err = validateCertKeyFiles(certFile, keyFile) + if err != nil { + return err + } + // cert/key pair is provided, read the fingerprint + fingerprint, err = fingerprintFromFile(certFile) + if err != nil { + return err + } + } else { + // if certificate is not supplied, generate a self-signed one + certFile, keyFile, fingerprint, err = genCertKeyFilesAndReturnFingerprint(sslHosts) + if err != nil { + return err + } + cleanupFunc = func() { + os.Remove(certFile) + os.Remove(keyFile) + } + defer cleanupFunc() + } + listener, err := tmserver.Listen(listenAddr, tmserver.Config{MaxOpenConnections: maxOpen}) + if err != nil { + return err + } + go func() { + if err = tmserver.StartHTTPServer(listener, handler, logger); err != nil { + panic(err) + } + }() + logger.Info(fingerprint) } - logger.Info("REST server started") // wait forever and cleanup cmn.TrapSignal(func() { + defer cleanupFunc() err := listener.Close() logger.Error("error closing listener", "err", err) }) @@ -62,15 +116,24 @@ func ServeCommand(cdc *wire.Codec) *cobra.Command { } cmd.Flags().String(flagListenAddr, "tcp://localhost:1317", "The address for the server to listen on") + cmd.Flags().Bool(flagInsecure, false, "Do not set up SSL/TLS layer") + cmd.Flags().String(flagSSLHosts, "", "Comma-separated hostnames and IPs to generate a certificate for") + cmd.Flags().String(flagSSLCertFile, "", "Path to a SSL certificate file. 
If not supplied, a self-signed certificate will be generated.")
+ cmd.Flags().String(flagSSLKeyFile, "", "Path to a key file; ignored if a certificate file is not supplied.")
cmd.Flags().String(flagCORS, "", "Set the domains that can make CORS requests (* for all)")
- cmd.Flags().String(client.FlagChainID, "", "The chain ID to connect to")
+ cmd.Flags().String(client.FlagChainID, "", "Chain ID of Tendermint node")
cmd.Flags().String(client.FlagNode, "tcp://localhost:26657", "Address of the node to connect to")
cmd.Flags().Int(flagMaxOpenConnections, 1000, "The number of maximum open connections")
+ cmd.Flags().Bool(client.FlagTrustNode, false, "Trust connected full node (don't verify proofs for responses)")
+ cmd.Flags().Bool(client.FlagIndentResponse, false, "Add indent to JSON response")
+ viper.BindPFlag(client.FlagTrustNode, cmd.Flags().Lookup(client.FlagTrustNode))
+ viper.BindPFlag(client.FlagChainID, cmd.Flags().Lookup(client.FlagChainID))
+ viper.BindPFlag(client.FlagNode, cmd.Flags().Lookup(client.FlagNode))
return cmd
}
-func createHandler(cdc *wire.Codec) http.Handler {
+func createHandler(cdc *codec.Codec) *mux.Router {
r := mux.NewRouter()
kb, err := keys.GetKeyBase() //XXX
@@ -78,21 +141,42 @@ func createHandler(cdc *wire.Codec) http.Handler {
panic(err)
}
- cliCtx := context.NewCLIContext().WithCodec(cdc).WithLogger(os.Stdout)
+ cliCtx := context.NewCLIContext().WithCodec(cdc)
// TODO: make more functional? aka r = keys.RegisterRoutes(r)
r.HandleFunc("/version", CLIVersionRequestHandler).Methods("GET")
r.HandleFunc("/node_version", NodeVersionRequestHandler(cliCtx)).Methods("GET")
- keys.RegisterRoutes(r)
+ keys.RegisterRoutes(r, cliCtx.Indent)
rpc.RegisterRoutes(cliCtx, r)
tx.RegisterRoutes(cliCtx, r, cdc)
auth.RegisterRoutes(cliCtx, r, cdc, "acc")
bank.RegisterRoutes(cliCtx, r, cdc, kb)
- ibc.RegisterRoutes(cliCtx, r, cdc, kb)
stake.RegisterRoutes(cliCtx, r, cdc, kb)
slashing.RegisterRoutes(cliCtx, r, cdc, kb)
gov.RegisterRoutes(cliCtx, r, cdc)
return r
}
+
+func registerSwaggerUI(r *mux.Router) {
+ statikFS, err := fs.New()
+ if err != nil {
+ panic(err)
+ }
+ staticServer := http.FileServer(statikFS)
+ r.PathPrefix("/swagger-ui/").Handler(http.StripPrefix("/swagger-ui/", staticServer))
+}
+
+func validateCertKeyFiles(certFile, keyFile string) error {
+ if keyFile == "" {
+ return errors.New("a key file is required")
+ }
+ if _, err := os.Stat(certFile); err != nil {
+ return err
+ }
+ if _, err := os.Stat(keyFile); err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/client/lcd/statik/init.go b/client/lcd/statik/init.go
new file mode 100644
index 000000000..9633aeb29
--- /dev/null
+++ b/client/lcd/statik/init.go
@@ -0,0 +1,3 @@
+package statik
+
+// This file exists so that the otherwise empty package github.com/cosmos/cosmos-sdk/client/lcd/statik can be imported without error
diff --git a/client/lcd/swagger-ui/favicon-16x16.png b/client/lcd/swagger-ui/favicon-16x16.png
new file mode 100644
index 000000000..0f7e13b0d
Binary files /dev/null and b/client/lcd/swagger-ui/favicon-16x16.png differ
diff --git a/client/lcd/swagger-ui/favicon-32x32.png b/client/lcd/swagger-ui/favicon-32x32.png
new file mode 100644
index 000000000..b0a3352ff
Binary files /dev/null and b/client/lcd/swagger-ui/favicon-32x32.png differ
diff --git a/client/lcd/swagger-ui/index.html b/client/lcd/swagger-ui/index.html
index b7332cb08..0c4855763 100644
--- a/client/lcd/swagger-ui/index.html
+++ b/client/lcd/swagger-ui/index.html
@@ -1,16 +1,60 @@
- - + + - - - RPC - + + + Swagger UI + + + + + + + +
+ + + + + diff --git a/client/lcd/swagger-ui/oauth2-redirect.html b/client/lcd/swagger-ui/oauth2-redirect.html new file mode 100644 index 000000000..fb68399d2 --- /dev/null +++ b/client/lcd/swagger-ui/oauth2-redirect.html @@ -0,0 +1,67 @@ + + + + + + diff --git a/client/lcd/swagger-ui/swagger-ui-bundle.js b/client/lcd/swagger-ui/swagger-ui-bundle.js new file mode 100644 index 000000000..4491b4b28 --- /dev/null +++ b/client/lcd/swagger-ui/swagger-ui-bundle.js @@ -0,0 +1,41776 @@ +! function(e, t) { + "object" == typeof exports && "object" == typeof module ? module.exports = t() : "function" == typeof define && define.amd ? define([], t) : "object" == typeof exports ? exports.SwaggerUIBundle = t() : e.SwaggerUIBundle = t() +}(this, function() { + return function(e) { + var t = {}; + + function n(r) { + if (t[r]) return t[r].exports; + var o = t[r] = { + i: r, + l: !1, + exports: {} + }; + return e[r].call(o.exports, o, o.exports, n), o.l = !0, o.exports + } + return n.m = e, n.c = t, n.d = function(e, t, r) { + n.o(e, t) || Object.defineProperty(e, t, { + configurable: !1, + enumerable: !0, + get: r + }) + }, n.n = function(e) { + var t = e && e.__esModule ? function() { + return e.default + } : function() { + return e + }; + return n.d(t, "a", t), t + }, n.o = function(e, t) { + return Object.prototype.hasOwnProperty.call(e, t) + }, n.p = "/dist", n(n.s = 445) + }([function(e, t, n) { + "use strict"; + e.exports = n(75) + }, function(e, t, n) { + e.exports = n(853)() + }, function(e, t, n) { + "use strict"; + t.__esModule = !0, t.default = function(e, t) { + if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") + } + }, function(e, t, n) { + "use strict"; + t.__esModule = !0; + var r, o = n(262), + i = (r = o) && r.__esModule ? r : { + default: r + }; + t.default = function() { + function e(e, t) { + for (var n = 0; n < t.length; n++) { + var r = t[n]; + r.enumerable = r.enumerable || !1, r.configurable = !0, "value" in r && (r.writable = !0), (0, i.default)(e, r.key, r) + } + } + return function(t, n, r) { + return n && e(t.prototype, n), r && e(t, r), t + } + }() + }, function(e, t, n) { + e.exports = { + default: n(766), + __esModule: !0 + } + }, function(e, t, n) { + "use strict"; + t.__esModule = !0; + var r, o = n(45), + i = (r = o) && r.__esModule ? r : { + default: r + }; + t.default = function(e, t) { + if (!e) throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); + return !t || "object" !== (void 0 === t ? "undefined" : (0, i.default)(t)) && "function" != typeof t ? e : t + } + }, function(e, t, n) { + "use strict"; + t.__esModule = !0; + var r = a(n(768)), + o = a(n(349)), + i = a(n(45)); + + function a(e) { + return e && e.__esModule ? e : { + default: e + } + } + t.default = function(e, t) { + if ("function" != typeof t && null !== t) throw new TypeError("Super expression must either be null or a function, not " + (void 0 === t ? "undefined" : (0, i.default)(t))); + e.prototype = (0, o.default)(t && t.prototype, { + constructor: { + value: e, + enumerable: !1, + writable: !0, + configurable: !0 + } + }), t && (r.default ? (0, r.default)(e, t) : e.__proto__ = t) + } + }, function(e, t, n) { + var r; + r = function() { + "use strict"; + var e = Array.prototype.slice; + + function t(e, t) { + t && (e.prototype = Object.create(t.prototype)), e.prototype.constructor = e + } + + function n(e) { + return a(e) ? e : J(e) + } + + function r(e) { + return u(e) ? e : Y(e) + } + + function o(e) { + return s(e) ? 
e : K(e) + } + + function i(e) { + return a(e) && !l(e) ? e : G(e) + } + + function a(e) { + return !(!e || !e[f]) + } + + function u(e) { + return !(!e || !e[p]) + } + + function s(e) { + return !(!e || !e[d]) + } + + function l(e) { + return u(e) || s(e) + } + + function c(e) { + return !(!e || !e[h]) + } + t(r, n), t(o, n), t(i, n), n.isIterable = a, n.isKeyed = u, n.isIndexed = s, n.isAssociative = l, n.isOrdered = c, n.Keyed = r, n.Indexed = o, n.Set = i; + var f = "@@__IMMUTABLE_ITERABLE__@@", + p = "@@__IMMUTABLE_KEYED__@@", + d = "@@__IMMUTABLE_INDEXED__@@", + h = "@@__IMMUTABLE_ORDERED__@@", + v = 5, + m = 1 << v, + g = m - 1, + y = {}, + b = { + value: !1 + }, + _ = { + value: !1 + }; + + function w(e) { + return e.value = !1, e + } + + function E(e) { + e && (e.value = !0) + } + + function x() {} + + function S(e, t) { + t = t || 0; + for (var n = Math.max(0, e.length - t), r = new Array(n), o = 0; o < n; o++) r[o] = e[o + t]; + return r + } + + function C(e) { + return void 0 === e.size && (e.size = e.__iterate(A)), e.size + } + + function k(e, t) { + if ("number" != typeof t) { + var n = t >>> 0; + if ("" + n !== t || 4294967295 === n) return NaN; + t = n + } + return t < 0 ? C(e) + t : t + } + + function A() { + return !0 + } + + function O(e, t, n) { + return (0 === e || void 0 !== n && e <= -n) && (void 0 === t || void 0 !== n && t >= n) + } + + function P(e, t) { + return M(e, t, 0) + } + + function T(e, t) { + return M(e, t, t) + } + + function M(e, t, n) { + return void 0 === e ? n : e < 0 ? Math.max(0, t + e) : void 0 === t ? e : Math.min(t, e) + } + var I = 0, + j = 1, + N = 2, + R = "function" == typeof Symbol && Symbol.iterator, + D = "@@iterator", + L = R || D; + + function U(e) { + this.next = e + } + + function q(e, t, n, r) { + var o = 0 === e ? t : 1 === e ? n : [t, n]; + return r ? r.value = o : r = { + value: o, + done: !1 + }, r + } + + function F() { + return { + value: void 0, + done: !0 + } + } + + function z(e) { + return !!H(e) + } + + function B(e) { + return e && "function" == typeof e.next + } + + function V(e) { + var t = H(e); + return t && t.call(e) + } + + function H(e) { + var t = e && (R && e[R] || e[D]); + if ("function" == typeof t) return t + } + + function W(e) { + return e && "number" == typeof e.length + } + + function J(e) { + return null === e || void 0 === e ? ie() : a(e) ? e.toSeq() : function(e) { + var t = se(e) || "object" == typeof e && new te(e); + if (!t) throw new TypeError("Expected Array or iterable object of values, or keyed object: " + e); + return t + }(e) + } + + function Y(e) { + return null === e || void 0 === e ? ie().toKeyedSeq() : a(e) ? u(e) ? e.toSeq() : e.fromEntrySeq() : ae(e) + } + + function K(e) { + return null === e || void 0 === e ? ie() : a(e) ? u(e) ? e.entrySeq() : e.toIndexedSeq() : ue(e) + } + + function G(e) { + return (null === e || void 0 === e ? ie() : a(e) ? u(e) ? 
e.entrySeq() : e : ue(e)).toSetSeq() + } + U.prototype.toString = function() { + return "[Iterator]" + }, U.KEYS = I, U.VALUES = j, U.ENTRIES = N, U.prototype.inspect = U.prototype.toSource = function() { + return this.toString() + }, U.prototype[L] = function() { + return this + }, t(J, n), J.of = function() { + return J(arguments) + }, J.prototype.toSeq = function() { + return this + }, J.prototype.toString = function() { + return this.__toString("Seq {", "}") + }, J.prototype.cacheResult = function() { + return !this._cache && this.__iterateUncached && (this._cache = this.entrySeq().toArray(), this.size = this._cache.length), this + }, J.prototype.__iterate = function(e, t) { + return le(this, e, t, !0) + }, J.prototype.__iterator = function(e, t) { + return ce(this, e, t, !0) + }, t(Y, J), Y.prototype.toKeyedSeq = function() { + return this + }, t(K, J), K.of = function() { + return K(arguments) + }, K.prototype.toIndexedSeq = function() { + return this + }, K.prototype.toString = function() { + return this.__toString("Seq [", "]") + }, K.prototype.__iterate = function(e, t) { + return le(this, e, t, !1) + }, K.prototype.__iterator = function(e, t) { + return ce(this, e, t, !1) + }, t(G, J), G.of = function() { + return G(arguments) + }, G.prototype.toSetSeq = function() { + return this + }, J.isSeq = oe, J.Keyed = Y, J.Set = G, J.Indexed = K; + var $, Z, X, Q = "@@__IMMUTABLE_SEQ__@@"; + + function ee(e) { + this._array = e, this.size = e.length + } + + function te(e) { + var t = Object.keys(e); + this._object = e, this._keys = t, this.size = t.length + } + + function ne(e) { + this._iterable = e, this.size = e.length || e.size + } + + function re(e) { + this._iterator = e, this._iteratorCache = [] + } + + function oe(e) { + return !(!e || !e[Q]) + } + + function ie() { + return $ || ($ = new ee([])) + } + + function ae(e) { + var t = Array.isArray(e) ? new ee(e).fromEntrySeq() : B(e) ? new re(e).fromEntrySeq() : z(e) ? new ne(e).fromEntrySeq() : "object" == typeof e ? new te(e) : void 0; + if (!t) throw new TypeError("Expected Array or iterable object of [k, v] entries, or keyed object: " + e); + return t + } + + function ue(e) { + var t = se(e); + if (!t) throw new TypeError("Expected Array or iterable object of values: " + e); + return t + } + + function se(e) { + return W(e) ? new ee(e) : B(e) ? new re(e) : z(e) ? new ne(e) : void 0 + } + + function le(e, t, n, r) { + var o = e._cache; + if (o) { + for (var i = o.length - 1, a = 0; a <= i; a++) { + var u = o[n ? i - a : a]; + if (!1 === t(u[1], r ? u[0] : a, e)) return a + 1 + } + return a + } + return e.__iterateUncached(t, n) + } + + function ce(e, t, n, r) { + var o = e._cache; + if (o) { + var i = o.length - 1, + a = 0; + return new U(function() { + var e = o[n ? i - a : a]; + return a++ > i ? { + value: void 0, + done: !0 + } : q(t, r ? e[0] : a - 1, e[1]) + }) + } + return e.__iteratorUncached(t, n) + } + + function fe(e, t) { + return t ? function e(t, n, r, o) { + if (Array.isArray(n)) return t.call(o, r, K(n).map(function(r, o) { + return e(t, r, o, n) + })); + if (de(n)) return t.call(o, r, Y(n).map(function(r, o) { + return e(t, r, o, n) + })); + return n + }(t, e, "", { + "": e + }) : pe(e) + } + + function pe(e) { + return Array.isArray(e) ? K(e).map(pe).toList() : de(e) ? 
Y(e).map(pe).toMap() : e + } + + function de(e) { + return e && (e.constructor === Object || void 0 === e.constructor) + } + + function he(e, t) { + if (e === t || e != e && t != t) return !0; + if (!e || !t) return !1; + if ("function" == typeof e.valueOf && "function" == typeof t.valueOf) { + if ((e = e.valueOf()) === (t = t.valueOf()) || e != e && t != t) return !0; + if (!e || !t) return !1 + } + return !("function" != typeof e.equals || "function" != typeof t.equals || !e.equals(t)) + } + + function ve(e, t) { + if (e === t) return !0; + if (!a(t) || void 0 !== e.size && void 0 !== t.size && e.size !== t.size || void 0 !== e.__hash && void 0 !== t.__hash && e.__hash !== t.__hash || u(e) !== u(t) || s(e) !== s(t) || c(e) !== c(t)) return !1; + if (0 === e.size && 0 === t.size) return !0; + var n = !l(e); + if (c(e)) { + var r = e.entries(); + return t.every(function(e, t) { + var o = r.next().value; + return o && he(o[1], e) && (n || he(o[0], t)) + }) && r.next().done + } + var o = !1; + if (void 0 === e.size) + if (void 0 === t.size) "function" == typeof e.cacheResult && e.cacheResult(); + else { + o = !0; + var i = e; + e = t, t = i + } + var f = !0, + p = t.__iterate(function(t, r) { + if (n ? !e.has(t) : o ? !he(t, e.get(r, y)) : !he(e.get(r, y), t)) return f = !1, !1 + }); + return f && e.size === p + } + + function me(e, t) { + if (!(this instanceof me)) return new me(e, t); + if (this._value = e, this.size = void 0 === t ? 1 / 0 : Math.max(0, t), 0 === this.size) { + if (Z) return Z; + Z = this + } + } + + function ge(e, t) { + if (!e) throw new Error(t) + } + + function ye(e, t, n) { + if (!(this instanceof ye)) return new ye(e, t, n); + if (ge(0 !== n, "Cannot step a Range by 0"), e = e || 0, void 0 === t && (t = 1 / 0), n = void 0 === n ? 1 : Math.abs(n), t < e && (n = -n), this._start = e, this._end = t, this._step = n, this.size = Math.max(0, Math.ceil((t - e) / n - 1) + 1), 0 === this.size) { + if (X) return X; + X = this + } + } + + function be() { + throw TypeError("Abstract") + } + + function _e() {} + + function we() {} + + function Ee() {} + J.prototype[Q] = !0, t(ee, K), ee.prototype.get = function(e, t) { + return this.has(e) ? this._array[k(this, e)] : t + }, ee.prototype.__iterate = function(e, t) { + for (var n = this._array, r = n.length - 1, o = 0; o <= r; o++) + if (!1 === e(n[t ? r - o : o], o, this)) return o + 1; + return o + }, ee.prototype.__iterator = function(e, t) { + var n = this._array, + r = n.length - 1, + o = 0; + return new U(function() { + return o > r ? { + value: void 0, + done: !0 + } : q(e, o, n[t ? r - o++ : o++]) + }) + }, t(te, Y), te.prototype.get = function(e, t) { + return void 0 === t || this.has(e) ? this._object[e] : t + }, te.prototype.has = function(e) { + return this._object.hasOwnProperty(e) + }, te.prototype.__iterate = function(e, t) { + for (var n = this._object, r = this._keys, o = r.length - 1, i = 0; i <= o; i++) { + var a = r[t ? o - i : i]; + if (!1 === e(n[a], a, this)) return i + 1 + } + return i + }, te.prototype.__iterator = function(e, t) { + var n = this._object, + r = this._keys, + o = r.length - 1, + i = 0; + return new U(function() { + var a = r[t ? o - i : i]; + return i++ > o ? 
{ + value: void 0, + done: !0 + } : q(e, a, n[a]) + }) + }, te.prototype[h] = !0, t(ne, K), ne.prototype.__iterateUncached = function(e, t) { + if (t) return this.cacheResult().__iterate(e, t); + var n = V(this._iterable), + r = 0; + if (B(n)) + for (var o; !(o = n.next()).done && !1 !== e(o.value, r++, this);); + return r + }, ne.prototype.__iteratorUncached = function(e, t) { + if (t) return this.cacheResult().__iterator(e, t); + var n = V(this._iterable); + if (!B(n)) return new U(F); + var r = 0; + return new U(function() { + var t = n.next(); + return t.done ? t : q(e, r++, t.value) + }) + }, t(re, K), re.prototype.__iterateUncached = function(e, t) { + if (t) return this.cacheResult().__iterate(e, t); + for (var n, r = this._iterator, o = this._iteratorCache, i = 0; i < o.length;) + if (!1 === e(o[i], i++, this)) return i; + for (; !(n = r.next()).done;) { + var a = n.value; + if (o[i] = a, !1 === e(a, i++, this)) break + } + return i + }, re.prototype.__iteratorUncached = function(e, t) { + if (t) return this.cacheResult().__iterator(e, t); + var n = this._iterator, + r = this._iteratorCache, + o = 0; + return new U(function() { + if (o >= r.length) { + var t = n.next(); + if (t.done) return t; + r[o] = t.value + } + return q(e, o, r[o++]) + }) + }, t(me, K), me.prototype.toString = function() { + return 0 === this.size ? "Repeat []" : "Repeat [ " + this._value + " " + this.size + " times ]" + }, me.prototype.get = function(e, t) { + return this.has(e) ? this._value : t + }, me.prototype.includes = function(e) { + return he(this._value, e) + }, me.prototype.slice = function(e, t) { + var n = this.size; + return O(e, t, n) ? this : new me(this._value, T(t, n) - P(e, n)) + }, me.prototype.reverse = function() { + return this + }, me.prototype.indexOf = function(e) { + return he(this._value, e) ? 0 : -1 + }, me.prototype.lastIndexOf = function(e) { + return he(this._value, e) ? this.size : -1 + }, me.prototype.__iterate = function(e, t) { + for (var n = 0; n < this.size; n++) + if (!1 === e(this._value, n, this)) return n + 1; + return n + }, me.prototype.__iterator = function(e, t) { + var n = this, + r = 0; + return new U(function() { + return r < n.size ? q(e, r++, n._value) : { + value: void 0, + done: !0 + } + }) + }, me.prototype.equals = function(e) { + return e instanceof me ? he(this._value, e._value) : ve(e) + }, t(ye, K), ye.prototype.toString = function() { + return 0 === this.size ? "Range []" : "Range [ " + this._start + "..." + this._end + (1 !== this._step ? " by " + this._step : "") + " ]" + }, ye.prototype.get = function(e, t) { + return this.has(e) ? this._start + k(this, e) * this._step : t + }, ye.prototype.includes = function(e) { + var t = (e - this._start) / this._step; + return t >= 0 && t < this.size && t === Math.floor(t) + }, ye.prototype.slice = function(e, t) { + return O(e, t, this.size) ? this : (e = P(e, this.size), (t = T(t, this.size)) <= e ? new ye(0, 0) : new ye(this.get(e, this._end), this.get(t, this._end), this._step)) + }, ye.prototype.indexOf = function(e) { + var t = e - this._start; + if (t % this._step == 0) { + var n = t / this._step; + if (n >= 0 && n < this.size) return n + } + return -1 + }, ye.prototype.lastIndexOf = function(e) { + return this.indexOf(e) + }, ye.prototype.__iterate = function(e, t) { + for (var n = this.size - 1, r = this._step, o = t ? this._start + n * r : this._start, i = 0; i <= n; i++) { + if (!1 === e(o, i, this)) return i + 1; + o += t ? 
-r : r + } + return i + }, ye.prototype.__iterator = function(e, t) { + var n = this.size - 1, + r = this._step, + o = t ? this._start + n * r : this._start, + i = 0; + return new U(function() { + var a = o; + return o += t ? -r : r, i > n ? { + value: void 0, + done: !0 + } : q(e, i++, a) + }) + }, ye.prototype.equals = function(e) { + return e instanceof ye ? this._start === e._start && this._end === e._end && this._step === e._step : ve(this, e) + }, t(be, n), t(_e, be), t(we, be), t(Ee, be), be.Keyed = _e, be.Indexed = we, be.Set = Ee; + var xe = "function" == typeof Math.imul && -2 === Math.imul(4294967295, 2) ? Math.imul : function(e, t) { + var n = 65535 & (e |= 0), + r = 65535 & (t |= 0); + return n * r + ((e >>> 16) * r + n * (t >>> 16) << 16 >>> 0) | 0 + }; + + function Se(e) { + return e >>> 1 & 1073741824 | 3221225471 & e + } + + function Ce(e) { + if (!1 === e || null === e || void 0 === e) return 0; + if ("function" == typeof e.valueOf && (!1 === (e = e.valueOf()) || null === e || void 0 === e)) return 0; + if (!0 === e) return 1; + var t = typeof e; + if ("number" === t) { + if (e != e || e === 1 / 0) return 0; + var n = 0 | e; + for (n !== e && (n ^= 4294967295 * e); e > 4294967295;) n ^= e /= 4294967295; + return Se(n) + } + if ("string" === t) return e.length > je ? function(e) { + var t = De[e]; + void 0 === t && (t = ke(e), Re === Ne && (Re = 0, De = {}), Re++, De[e] = t); + return t + }(e) : ke(e); + if ("function" == typeof e.hashCode) return e.hashCode(); + if ("object" === t) return function(e) { + var t; + if (Te && void 0 !== (t = Pe.get(e))) return t; + if (void 0 !== (t = e[Ie])) return t; + if (!Oe) { + if (void 0 !== (t = e.propertyIsEnumerable && e.propertyIsEnumerable[Ie])) return t; + if (void 0 !== (t = function(e) { + if (e && e.nodeType > 0) switch (e.nodeType) { + case 1: + return e.uniqueID; + case 9: + return e.documentElement && e.documentElement.uniqueID + } + }(e))) return t + } + t = ++Me, 1073741824 & Me && (Me = 0); + if (Te) Pe.set(e, t); + else { + if (void 0 !== Ae && !1 === Ae(e)) throw new Error("Non-extensible objects are not allowed as keys."); + if (Oe) Object.defineProperty(e, Ie, { + enumerable: !1, + configurable: !1, + writable: !1, + value: t + }); + else if (void 0 !== e.propertyIsEnumerable && e.propertyIsEnumerable === e.constructor.prototype.propertyIsEnumerable) e.propertyIsEnumerable = function() { + return this.constructor.prototype.propertyIsEnumerable.apply(this, arguments) + }, e.propertyIsEnumerable[Ie] = t; + else { + if (void 0 === e.nodeType) throw new Error("Unable to set a non-enumerable property on object."); + e[Ie] = t + } + } + return t + }(e); + if ("function" == typeof e.toString) return ke(e.toString()); + throw new Error("Value type " + t + " cannot be hashed.") + } + + function ke(e) { + for (var t = 0, n = 0; n < e.length; n++) t = 31 * t + e.charCodeAt(n) | 0; + return Se(t) + } + var Ae = Object.isExtensible, + Oe = function() { + try { + return Object.defineProperty({}, "@", {}), !0 + } catch (e) { + return !1 + } + }(); + var Pe, Te = "function" == typeof WeakMap; + Te && (Pe = new WeakMap); + var Me = 0, + Ie = "__immutablehash__"; + "function" == typeof Symbol && (Ie = Symbol(Ie)); + var je = 16, + Ne = 255, + Re = 0, + De = {}; + + function Le(e) { + ge(e !== 1 / 0, "Cannot perform this action with an infinite size.") + } + + function Ue(e) { + return null === e || void 0 === e ? Xe() : qe(e) && !c(e) ? 
e : Xe().withMutations(function(t) { + var n = r(e); + Le(n.size), n.forEach(function(e, n) { + return t.set(n, e) + }) + }) + } + + function qe(e) { + return !(!e || !e[ze]) + } + t(Ue, _e), Ue.of = function() { + var t = e.call(arguments, 0); + return Xe().withMutations(function(e) { + for (var n = 0; n < t.length; n += 2) { + if (n + 1 >= t.length) throw new Error("Missing value for key: " + t[n]); + e.set(t[n], t[n + 1]) + } + }) + }, Ue.prototype.toString = function() { + return this.__toString("Map {", "}") + }, Ue.prototype.get = function(e, t) { + return this._root ? this._root.get(0, void 0, e, t) : t + }, Ue.prototype.set = function(e, t) { + return Qe(this, e, t) + }, Ue.prototype.setIn = function(e, t) { + return this.updateIn(e, y, function() { + return t + }) + }, Ue.prototype.remove = function(e) { + return Qe(this, e, y) + }, Ue.prototype.deleteIn = function(e) { + return this.updateIn(e, function() { + return y + }) + }, Ue.prototype.update = function(e, t, n) { + return 1 === arguments.length ? e(this) : this.updateIn([e], t, n) + }, Ue.prototype.updateIn = function(e, t, n) { + n || (n = t, t = void 0); + var r = function e(t, n, r, o) { + var i = t === y; + var a = n.next(); + if (a.done) { + var u = i ? r : t, + s = o(u); + return s === u ? t : s + } + ge(i || t && t.set, "invalid keyPath"); + var l = a.value; + var c = i ? y : t.get(l, y); + var f = e(c, n, r, o); + return f === c ? t : f === y ? t.remove(l) : (i ? Xe() : t).set(l, f) + }(this, nn(e), t, n); + return r === y ? void 0 : r + }, Ue.prototype.clear = function() { + return 0 === this.size ? this : this.__ownerID ? (this.size = 0, this._root = null, this.__hash = void 0, this.__altered = !0, this) : Xe() + }, Ue.prototype.merge = function() { + return rt(this, void 0, arguments) + }, Ue.prototype.mergeWith = function(t) { + return rt(this, t, e.call(arguments, 1)) + }, Ue.prototype.mergeIn = function(t) { + var n = e.call(arguments, 1); + return this.updateIn(t, Xe(), function(e) { + return "function" == typeof e.merge ? e.merge.apply(e, n) : n[n.length - 1] + }) + }, Ue.prototype.mergeDeep = function() { + return rt(this, ot, arguments) + }, Ue.prototype.mergeDeepWith = function(t) { + var n = e.call(arguments, 1); + return rt(this, it(t), n) + }, Ue.prototype.mergeDeepIn = function(t) { + var n = e.call(arguments, 1); + return this.updateIn(t, Xe(), function(e) { + return "function" == typeof e.mergeDeep ? e.mergeDeep.apply(e, n) : n[n.length - 1] + }) + }, Ue.prototype.sort = function(e) { + return Pt(Wt(this, e)) + }, Ue.prototype.sortBy = function(e, t) { + return Pt(Wt(this, t, e)) + }, Ue.prototype.withMutations = function(e) { + var t = this.asMutable(); + return e(t), t.wasAltered() ? t.__ensureOwner(this.__ownerID) : this + }, Ue.prototype.asMutable = function() { + return this.__ownerID ? this : this.__ensureOwner(new x) + }, Ue.prototype.asImmutable = function() { + return this.__ensureOwner() + }, Ue.prototype.wasAltered = function() { + return this.__altered + }, Ue.prototype.__iterator = function(e, t) { + return new Ke(this, e, t) + }, Ue.prototype.__iterate = function(e, t) { + var n = this, + r = 0; + return this._root && this._root.iterate(function(t) { + return r++, e(t[1], t[0], n) + }, t), r + }, Ue.prototype.__ensureOwner = function(e) { + return e === this.__ownerID ? this : e ? 
Ze(this.size, this._root, e, this.__hash) : (this.__ownerID = e, this.__altered = !1, this) + }, Ue.isMap = qe; + var Fe, ze = "@@__IMMUTABLE_MAP__@@", + Be = Ue.prototype; + + function Ve(e, t) { + this.ownerID = e, this.entries = t + } + + function He(e, t, n) { + this.ownerID = e, this.bitmap = t, this.nodes = n + } + + function We(e, t, n) { + this.ownerID = e, this.count = t, this.nodes = n + } + + function Je(e, t, n) { + this.ownerID = e, this.keyHash = t, this.entries = n + } + + function Ye(e, t, n) { + this.ownerID = e, this.keyHash = t, this.entry = n + } + + function Ke(e, t, n) { + this._type = t, this._reverse = n, this._stack = e._root && $e(e._root) + } + + function Ge(e, t) { + return q(e, t[0], t[1]) + } + + function $e(e, t) { + return { + node: e, + index: 0, + __prev: t + } + } + + function Ze(e, t, n, r) { + var o = Object.create(Be); + return o.size = e, o._root = t, o.__ownerID = n, o.__hash = r, o.__altered = !1, o + } + + function Xe() { + return Fe || (Fe = Ze(0)) + } + + function Qe(e, t, n) { + var r, o; + if (e._root) { + var i = w(b), + a = w(_); + if (r = et(e._root, e.__ownerID, 0, void 0, t, n, i, a), !a.value) return e; + o = e.size + (i.value ? n === y ? -1 : 1 : 0) + } else { + if (n === y) return e; + o = 1, r = new Ve(e.__ownerID, [ + [t, n] + ]) + } + return e.__ownerID ? (e.size = o, e._root = r, e.__hash = void 0, e.__altered = !0, e) : r ? Ze(o, r) : Xe() + } + + function et(e, t, n, r, o, i, a, u) { + return e ? e.update(t, n, r, o, i, a, u) : i === y ? e : (E(u), E(a), new Ye(t, r, [o, i])) + } + + function tt(e) { + return e.constructor === Ye || e.constructor === Je + } + + function nt(e, t, n, r, o) { + if (e.keyHash === r) return new Je(t, r, [e.entry, o]); + var i, a = (0 === n ? e.keyHash : e.keyHash >>> n) & g, + u = (0 === n ? r : r >>> n) & g; + return new He(t, 1 << a | 1 << u, a === u ? [nt(e, t, n + v, r, o)] : (i = new Ye(t, r, o), a < u ? [e, i] : [i, e])) + } + + function rt(e, t, n) { + for (var o = [], i = 0; i < n.length; i++) { + var u = n[i], + s = r(u); + a(u) || (s = s.map(function(e) { + return fe(e) + })), o.push(s) + } + return at(e, t, o) + } + + function ot(e, t, n) { + return e && e.mergeDeep && a(t) ? e.mergeDeep(t) : he(e, t) ? e : t + } + + function it(e) { + return function(t, n, r) { + if (t && t.mergeDeepWith && a(n)) return t.mergeDeepWith(e, n); + var o = e(t, n, r); + return he(t, o) ? t : o + } + } + + function at(e, t, n) { + return 0 === (n = n.filter(function(e) { + return 0 !== e.size + })).length ? e : 0 !== e.size || e.__ownerID || 1 !== n.length ? e.withMutations(function(e) { + for (var r = t ? function(n, r) { + e.update(r, y, function(e) { + return e === y ? n : t(e, n, r) + }) + } : function(t, n) { + e.set(n, t) + }, o = 0; o < n.length; o++) n[o].forEach(r) + }) : e.constructor(n[0]) + } + + function ut(e) { + return e = (e = (858993459 & (e -= e >> 1 & 1431655765)) + (e >> 2 & 858993459)) + (e >> 4) & 252645135, e += e >> 8, 127 & (e += e >> 16) + } + + function st(e, t, n, r) { + var o = r ? e : S(e); + return o[t] = n, o + } + Be[ze] = !0, Be.delete = Be.remove, Be.removeIn = Be.deleteIn, Ve.prototype.get = function(e, t, n, r) { + for (var o = this.entries, i = 0, a = o.length; i < a; i++) + if (he(n, o[i][0])) return o[i][1]; + return r + }, Ve.prototype.update = function(e, t, n, r, o, i, a) { + for (var u = o === y, s = this.entries, l = 0, c = s.length; l < c && !he(r, s[l][0]); l++); + var f = l < c; + if (f ? 
s[l][1] === o : u) return this; + if (E(a), (u || !f) && E(i), !u || 1 !== s.length) { + if (!f && !u && s.length >= lt) return function(e, t, n, r) { + e || (e = new x); + for (var o = new Ye(e, Ce(n), [n, r]), i = 0; i < t.length; i++) { + var a = t[i]; + o = o.update(e, 0, void 0, a[0], a[1]) + } + return o + }(e, s, r, o); + var p = e && e === this.ownerID, + d = p ? s : S(s); + return f ? u ? l === c - 1 ? d.pop() : d[l] = d.pop() : d[l] = [r, o] : d.push([r, o]), p ? (this.entries = d, this) : new Ve(e, d) + } + }, He.prototype.get = function(e, t, n, r) { + void 0 === t && (t = Ce(n)); + var o = 1 << ((0 === e ? t : t >>> e) & g), + i = this.bitmap; + return 0 == (i & o) ? r : this.nodes[ut(i & o - 1)].get(e + v, t, n, r) + }, He.prototype.update = function(e, t, n, r, o, i, a) { + void 0 === n && (n = Ce(r)); + var u = (0 === t ? n : n >>> t) & g, + s = 1 << u, + l = this.bitmap, + c = 0 != (l & s); + if (!c && o === y) return this; + var f = ut(l & s - 1), + p = this.nodes, + d = c ? p[f] : void 0, + h = et(d, e, t + v, n, r, o, i, a); + if (h === d) return this; + if (!c && h && p.length >= ct) return function(e, t, n, r, o) { + for (var i = 0, a = new Array(m), u = 0; 0 !== n; u++, n >>>= 1) a[u] = 1 & n ? t[i++] : void 0; + return a[r] = o, new We(e, i + 1, a) + }(e, p, l, u, h); + if (c && !h && 2 === p.length && tt(p[1 ^ f])) return p[1 ^ f]; + if (c && h && 1 === p.length && tt(h)) return h; + var b = e && e === this.ownerID, + _ = c ? h ? l : l ^ s : l | s, + w = c ? h ? st(p, f, h, b) : function(e, t, n) { + var r = e.length - 1; + if (n && t === r) return e.pop(), e; + for (var o = new Array(r), i = 0, a = 0; a < r; a++) a === t && (i = 1), o[a] = e[a + i]; + return o + }(p, f, b) : function(e, t, n, r) { + var o = e.length + 1; + if (r && t + 1 === o) return e[t] = n, e; + for (var i = new Array(o), a = 0, u = 0; u < o; u++) u === t ? (i[u] = n, a = -1) : i[u] = e[u + a]; + return i + }(p, f, h, b); + return b ? (this.bitmap = _, this.nodes = w, this) : new He(e, _, w) + }, We.prototype.get = function(e, t, n, r) { + void 0 === t && (t = Ce(n)); + var o = (0 === e ? t : t >>> e) & g, + i = this.nodes[o]; + return i ? i.get(e + v, t, n, r) : r + }, We.prototype.update = function(e, t, n, r, o, i, a) { + void 0 === n && (n = Ce(r)); + var u = (0 === t ? n : n >>> t) & g, + s = o === y, + l = this.nodes, + c = l[u]; + if (s && !c) return this; + var f = et(c, e, t + v, n, r, o, i, a); + if (f === c) return this; + var p = this.count; + if (c) { + if (!f && --p < ft) return function(e, t, n, r) { + for (var o = 0, i = 0, a = new Array(n), u = 0, s = 1, l = t.length; u < l; u++, s <<= 1) { + var c = t[u]; + void 0 !== c && u !== r && (o |= s, a[i++] = c) + } + return new He(e, o, a) + }(e, l, p, u) + } else p++; + var d = e && e === this.ownerID, + h = st(l, u, f, d); + return d ? (this.count = p, this.nodes = h, this) : new We(e, p, h) + }, Je.prototype.get = function(e, t, n, r) { + for (var o = this.entries, i = 0, a = o.length; i < a; i++) + if (he(n, o[i][0])) return o[i][1]; + return r + }, Je.prototype.update = function(e, t, n, r, o, i, a) { + void 0 === n && (n = Ce(r)); + var u = o === y; + if (n !== this.keyHash) return u ? this : (E(a), E(i), nt(this, e, t, n, [r, o])); + for (var s = this.entries, l = 0, c = s.length; l < c && !he(r, s[l][0]); l++); + var f = l < c; + if (f ? s[l][1] === o : u) return this; + if (E(a), (u || !f) && E(i), u && 2 === c) return new Ye(e, this.keyHash, s[1 ^ l]); + var p = e && e === this.ownerID, + d = p ? s : S(s); + return f ? u ? 
l === c - 1 ? d.pop() : d[l] = d.pop() : d[l] = [r, o] : d.push([r, o]), p ? (this.entries = d, this) : new Je(e, this.keyHash, d) + }, Ye.prototype.get = function(e, t, n, r) { + return he(n, this.entry[0]) ? this.entry[1] : r + }, Ye.prototype.update = function(e, t, n, r, o, i, a) { + var u = o === y, + s = he(r, this.entry[0]); + return (s ? o === this.entry[1] : u) ? this : (E(a), u ? void E(i) : s ? e && e === this.ownerID ? (this.entry[1] = o, this) : new Ye(e, this.keyHash, [r, o]) : (E(i), nt(this, e, t, Ce(r), [r, o]))) + }, Ve.prototype.iterate = Je.prototype.iterate = function(e, t) { + for (var n = this.entries, r = 0, o = n.length - 1; r <= o; r++) + if (!1 === e(n[t ? o - r : r])) return !1 + }, He.prototype.iterate = We.prototype.iterate = function(e, t) { + for (var n = this.nodes, r = 0, o = n.length - 1; r <= o; r++) { + var i = n[t ? o - r : r]; + if (i && !1 === i.iterate(e, t)) return !1 + } + }, Ye.prototype.iterate = function(e, t) { + return e(this.entry) + }, t(Ke, U), Ke.prototype.next = function() { + for (var e = this._type, t = this._stack; t;) { + var n, r = t.node, + o = t.index++; + if (r.entry) { + if (0 === o) return Ge(e, r.entry) + } else if (r.entries) { + if (o <= (n = r.entries.length - 1)) return Ge(e, r.entries[this._reverse ? n - o : o]) + } else if (o <= (n = r.nodes.length - 1)) { + var i = r.nodes[this._reverse ? n - o : o]; + if (i) { + if (i.entry) return Ge(e, i.entry); + t = this._stack = $e(i, t) + } + continue + } + t = this._stack = this._stack.__prev + } + return { + value: void 0, + done: !0 + } + }; + var lt = m / 4, + ct = m / 2, + ft = m / 4; + + function pt(e) { + var t = Et(); + if (null === e || void 0 === e) return t; + if (dt(e)) return e; + var n = o(e), + r = n.size; + return 0 === r ? t : (Le(r), r > 0 && r < m ? wt(0, r, v, null, new mt(n.toArray())) : t.withMutations(function(e) { + e.setSize(r), n.forEach(function(t, n) { + return e.set(n, t) + }) + })) + } + + function dt(e) { + return !(!e || !e[ht]) + } + t(pt, we), pt.of = function() { + return this(arguments) + }, pt.prototype.toString = function() { + return this.__toString("List [", "]") + }, pt.prototype.get = function(e, t) { + if ((e = k(this, e)) >= 0 && e < this.size) { + var n = Ct(this, e += this._origin); + return n && n.array[e & g] + } + return t + }, pt.prototype.set = function(e, t) { + return function(e, t, n) { + if ((t = k(e, t)) != t) return e; + if (t >= e.size || t < 0) return e.withMutations(function(e) { + t < 0 ? kt(e, t).set(0, n) : kt(e, 0, t + 1).set(t, n) + }); + t += e._origin; + var r = e._tail, + o = e._root, + i = w(_); + t >= Ot(e._capacity) ? r = xt(r, e.__ownerID, 0, t, n, i) : o = xt(o, e.__ownerID, e._level, t, n, i); + if (!i.value) return e; + if (e.__ownerID) return e._root = o, e._tail = r, e.__hash = void 0, e.__altered = !0, e; + return wt(e._origin, e._capacity, e._level, o, r) + }(this, e, t) + }, pt.prototype.remove = function(e) { + return this.has(e) ? 0 === e ? this.shift() : e === this.size - 1 ? this.pop() : this.splice(e, 1) : this + }, pt.prototype.insert = function(e, t) { + return this.splice(e, 0, t) + }, pt.prototype.clear = function() { + return 0 === this.size ? this : this.__ownerID ? 
(this.size = this._origin = this._capacity = 0, this._level = v, this._root = this._tail = null, this.__hash = void 0, this.__altered = !0, this) : Et() + }, pt.prototype.push = function() { + var e = arguments, + t = this.size; + return this.withMutations(function(n) { + kt(n, 0, t + e.length); + for (var r = 0; r < e.length; r++) n.set(t + r, e[r]) + }) + }, pt.prototype.pop = function() { + return kt(this, 0, -1) + }, pt.prototype.unshift = function() { + var e = arguments; + return this.withMutations(function(t) { + kt(t, -e.length); + for (var n = 0; n < e.length; n++) t.set(n, e[n]) + }) + }, pt.prototype.shift = function() { + return kt(this, 1) + }, pt.prototype.merge = function() { + return At(this, void 0, arguments) + }, pt.prototype.mergeWith = function(t) { + return At(this, t, e.call(arguments, 1)) + }, pt.prototype.mergeDeep = function() { + return At(this, ot, arguments) + }, pt.prototype.mergeDeepWith = function(t) { + var n = e.call(arguments, 1); + return At(this, it(t), n) + }, pt.prototype.setSize = function(e) { + return kt(this, 0, e) + }, pt.prototype.slice = function(e, t) { + var n = this.size; + return O(e, t, n) ? this : kt(this, P(e, n), T(t, n)) + }, pt.prototype.__iterator = function(e, t) { + var n = 0, + r = _t(this, t); + return new U(function() { + var t = r(); + return t === bt ? { + value: void 0, + done: !0 + } : q(e, n++, t) + }) + }, pt.prototype.__iterate = function(e, t) { + for (var n, r = 0, o = _t(this, t); + (n = o()) !== bt && !1 !== e(n, r++, this);); + return r + }, pt.prototype.__ensureOwner = function(e) { + return e === this.__ownerID ? this : e ? wt(this._origin, this._capacity, this._level, this._root, this._tail, e, this.__hash) : (this.__ownerID = e, this) + }, pt.isList = dt; + var ht = "@@__IMMUTABLE_LIST__@@", + vt = pt.prototype; + + function mt(e, t) { + this.array = e, this.ownerID = t + } + vt[ht] = !0, vt.delete = vt.remove, vt.setIn = Be.setIn, vt.deleteIn = vt.removeIn = Be.removeIn, vt.update = Be.update, vt.updateIn = Be.updateIn, vt.mergeIn = Be.mergeIn, vt.mergeDeepIn = Be.mergeDeepIn, vt.withMutations = Be.withMutations, vt.asMutable = Be.asMutable, vt.asImmutable = Be.asImmutable, vt.wasAltered = Be.wasAltered, mt.prototype.removeBefore = function(e, t, n) { + if (n === t ? 1 << t : 0 === this.array.length) return this; + var r = n >>> t & g; + if (r >= this.array.length) return new mt([], e); + var o, i = 0 === r; + if (t > 0) { + var a = this.array[r]; + if ((o = a && a.removeBefore(e, t - v, n)) === a && i) return this + } + if (i && !o) return this; + var u = St(this, e); + if (!i) + for (var s = 0; s < r; s++) u.array[s] = void 0; + return o && (u.array[r] = o), u + }, mt.prototype.removeAfter = function(e, t, n) { + if (n === (t ? 1 << t : 0) || 0 === this.array.length) return this; + var r, o = n - 1 >>> t & g; + if (o >= this.array.length) return this; + if (t > 0) { + var i = this.array[o]; + if ((r = i && i.removeAfter(e, t - v, n)) === i && o === this.array.length - 1) return this + } + var a = St(this, e); + return a.array.splice(o + 1), r && (a.array[o] = r), a + }; + var gt, yt, bt = {}; + + function _t(e, t) { + var n = e._origin, + r = e._capacity, + o = Ot(r), + i = e._tail; + return a(e._root, e._level, 0); + + function a(e, u, s) { + return 0 === u ? function(e, a) { + var u = a === o ? i && i.array : e && e.array, + s = a > n ? 0 : n - a, + l = r - a; + l > m && (l = m); + return function() { + if (s === l) return bt; + var e = t ? 
--l : s++; + return u && u[e] + } + }(e, s) : function(e, o, i) { + var u, s = e && e.array, + l = i > n ? 0 : n - i >> o, + c = 1 + (r - i >> o); + c > m && (c = m); + return function() { + for (;;) { + if (u) { + var e = u(); + if (e !== bt) return e; + u = null + } + if (l === c) return bt; + var n = t ? --c : l++; + u = a(s && s[n], o - v, i + (n << o)) + } + } + }(e, u, s) + } + } + + function wt(e, t, n, r, o, i, a) { + var u = Object.create(vt); + return u.size = t - e, u._origin = e, u._capacity = t, u._level = n, u._root = r, u._tail = o, u.__ownerID = i, u.__hash = a, u.__altered = !1, u + } + + function Et() { + return gt || (gt = wt(0, 0, v)) + } + + function xt(e, t, n, r, o, i) { + var a, u = r >>> n & g, + s = e && u < e.array.length; + if (!s && void 0 === o) return e; + if (n > 0) { + var l = e && e.array[u], + c = xt(l, t, n - v, r, o, i); + return c === l ? e : ((a = St(e, t)).array[u] = c, a) + } + return s && e.array[u] === o ? e : (E(i), a = St(e, t), void 0 === o && u === a.array.length - 1 ? a.array.pop() : a.array[u] = o, a) + } + + function St(e, t) { + return t && e && t === e.ownerID ? e : new mt(e ? e.array.slice() : [], t) + } + + function Ct(e, t) { + if (t >= Ot(e._capacity)) return e._tail; + if (t < 1 << e._level + v) { + for (var n = e._root, r = e._level; n && r > 0;) n = n.array[t >>> r & g], r -= v; + return n + } + } + + function kt(e, t, n) { + void 0 !== t && (t |= 0), void 0 !== n && (n |= 0); + var r = e.__ownerID || new x, + o = e._origin, + i = e._capacity, + a = o + t, + u = void 0 === n ? i : n < 0 ? i + n : o + n; + if (a === o && u === i) return e; + if (a >= u) return e.clear(); + for (var s = e._level, l = e._root, c = 0; a + c < 0;) l = new mt(l && l.array.length ? [void 0, l] : [], r), c += 1 << (s += v); + c && (a += c, o += c, u += c, i += c); + for (var f = Ot(i), p = Ot(u); p >= 1 << s + v;) l = new mt(l && l.array.length ? [l] : [], r), s += v; + var d = e._tail, + h = p < f ? Ct(e, u - 1) : p > f ? new mt([], r) : d; + if (d && p > f && a < i && d.array.length) { + for (var m = l = St(l, r), y = s; y > v; y -= v) { + var b = f >>> y & g; + m = m.array[b] = St(m.array[b], r) + } + m.array[f >>> v & g] = d + } + if (u < i && (h = h && h.removeAfter(r, 0, u)), a >= p) a -= p, u -= p, s = v, l = null, h = h && h.removeBefore(r, 0, a); + else if (a > o || p < f) { + for (c = 0; l;) { + var _ = a >>> s & g; + if (_ !== p >>> s & g) break; + _ && (c += (1 << s) * _), s -= v, l = l.array[_] + } + l && a > o && (l = l.removeBefore(r, s, a - c)), l && p < f && (l = l.removeAfter(r, s, p - c)), c && (a -= c, u -= c) + } + return e.__ownerID ? (e.size = u - a, e._origin = a, e._capacity = u, e._level = s, e._root = l, e._tail = h, e.__hash = void 0, e.__altered = !0, e) : wt(a, u, s, l, h) + } + + function At(e, t, n) { + for (var r = [], i = 0, u = 0; u < n.length; u++) { + var s = n[u], + l = o(s); + l.size > i && (i = l.size), a(s) || (l = l.map(function(e) { + return fe(e) + })), r.push(l) + } + return i > e.size && (e = e.setSize(i)), at(e, t, r) + } + + function Ot(e) { + return e < m ? 0 : e - 1 >>> v << v + } + + function Pt(e) { + return null === e || void 0 === e ? It() : Tt(e) ? e : It().withMutations(function(t) { + var n = r(e); + Le(n.size), n.forEach(function(e, n) { + return t.set(n, e) + }) + }) + } + + function Tt(e) { + return qe(e) && c(e) + } + + function Mt(e, t, n, r) { + var o = Object.create(Pt.prototype); + return o.size = e ? 
e.size : 0, o._map = e, o._list = t, o.__ownerID = n, o.__hash = r, o + } + + function It() { + return yt || (yt = Mt(Xe(), Et())) + } + + function jt(e, t, n) { + var r, o, i = e._map, + a = e._list, + u = i.get(t), + s = void 0 !== u; + if (n === y) { + if (!s) return e; + a.size >= m && a.size >= 2 * i.size ? (r = (o = a.filter(function(e, t) { + return void 0 !== e && u !== t + })).toKeyedSeq().map(function(e) { + return e[0] + }).flip().toMap(), e.__ownerID && (r.__ownerID = o.__ownerID = e.__ownerID)) : (r = i.remove(t), o = u === a.size - 1 ? a.pop() : a.set(u, void 0)) + } else if (s) { + if (n === a.get(u)[1]) return e; + r = i, o = a.set(u, [t, n]) + } else r = i.set(t, a.size), o = a.set(a.size, [t, n]); + return e.__ownerID ? (e.size = r.size, e._map = r, e._list = o, e.__hash = void 0, e) : Mt(r, o) + } + + function Nt(e, t) { + this._iter = e, this._useKeys = t, this.size = e.size + } + + function Rt(e) { + this._iter = e, this.size = e.size + } + + function Dt(e) { + this._iter = e, this.size = e.size + } + + function Lt(e) { + this._iter = e, this.size = e.size + } + + function Ut(e) { + var t = Qt(e); + return t._iter = e, t.size = e.size, t.flip = function() { + return e + }, t.reverse = function() { + var t = e.reverse.apply(this); + return t.flip = function() { + return e.reverse() + }, t + }, t.has = function(t) { + return e.includes(t) + }, t.includes = function(t) { + return e.has(t) + }, t.cacheResult = en, t.__iterateUncached = function(t, n) { + var r = this; + return e.__iterate(function(e, n) { + return !1 !== t(n, e, r) + }, n) + }, t.__iteratorUncached = function(t, n) { + if (t === N) { + var r = e.__iterator(t, n); + return new U(function() { + var e = r.next(); + if (!e.done) { + var t = e.value[0]; + e.value[0] = e.value[1], e.value[1] = t + } + return e + }) + } + return e.__iterator(t === j ? I : j, n) + }, t + } + + function qt(e, t, n) { + var r = Qt(e); + return r.size = e.size, r.has = function(t) { + return e.has(t) + }, r.get = function(r, o) { + var i = e.get(r, y); + return i === y ? o : t.call(n, i, r, e) + }, r.__iterateUncached = function(r, o) { + var i = this; + return e.__iterate(function(e, o, a) { + return !1 !== r(t.call(n, e, o, a), o, i) + }, o) + }, r.__iteratorUncached = function(r, o) { + var i = e.__iterator(N, o); + return new U(function() { + var o = i.next(); + if (o.done) return o; + var a = o.value, + u = a[0]; + return q(r, u, t.call(n, a[1], u, e), o) + }) + }, r + } + + function Ft(e, t) { + var n = Qt(e); + return n._iter = e, n.size = e.size, n.reverse = function() { + return e + }, e.flip && (n.flip = function() { + var t = Ut(e); + return t.reverse = function() { + return e.flip() + }, t + }), n.get = function(n, r) { + return e.get(t ? n : -1 - n, r) + }, n.has = function(n) { + return e.has(t ? n : -1 - n) + }, n.includes = function(t) { + return e.includes(t) + }, n.cacheResult = en, n.__iterate = function(t, n) { + var r = this; + return e.__iterate(function(e, n) { + return t(e, n, r) + }, !n) + }, n.__iterator = function(t, n) { + return e.__iterator(t, !n) + }, n + } + + function zt(e, t, n, r) { + var o = Qt(e); + return r && (o.has = function(r) { + var o = e.get(r, y); + return o !== y && !!t.call(n, o, r, e) + }, o.get = function(r, o) { + var i = e.get(r, y); + return i !== y && t.call(n, i, r, e) ? i : o + }), o.__iterateUncached = function(o, i) { + var a = this, + u = 0; + return e.__iterate(function(e, i, s) { + if (t.call(n, e, i, s)) return u++, o(e, r ? 
i : u - 1, a) + }, i), u + }, o.__iteratorUncached = function(o, i) { + var a = e.__iterator(N, i), + u = 0; + return new U(function() { + for (;;) { + var i = a.next(); + if (i.done) return i; + var s = i.value, + l = s[0], + c = s[1]; + if (t.call(n, c, l, e)) return q(o, r ? l : u++, c, i) + } + }) + }, o + } + + function Bt(e, t, n, r) { + var o = e.size; + if (void 0 !== t && (t |= 0), void 0 !== n && (n === 1 / 0 ? n = o : n |= 0), O(t, n, o)) return e; + var i = P(t, o), + a = T(n, o); + if (i != i || a != a) return Bt(e.toSeq().cacheResult(), t, n, r); + var u, s = a - i; + s == s && (u = s < 0 ? 0 : s); + var l = Qt(e); + return l.size = 0 === u ? u : e.size && u || void 0, !r && oe(e) && u >= 0 && (l.get = function(t, n) { + return (t = k(this, t)) >= 0 && t < u ? e.get(t + i, n) : n + }), l.__iterateUncached = function(t, n) { + var o = this; + if (0 === u) return 0; + if (n) return this.cacheResult().__iterate(t, n); + var a = 0, + s = !0, + l = 0; + return e.__iterate(function(e, n) { + if (!s || !(s = a++ < i)) return l++, !1 !== t(e, r ? n : l - 1, o) && l !== u + }), l + }, l.__iteratorUncached = function(t, n) { + if (0 !== u && n) return this.cacheResult().__iterator(t, n); + var o = 0 !== u && e.__iterator(t, n), + a = 0, + s = 0; + return new U(function() { + for (; a++ < i;) o.next(); + if (++s > u) return { + value: void 0, + done: !0 + }; + var e = o.next(); + return r || t === j ? e : q(t, s - 1, t === I ? void 0 : e.value[1], e) + }) + }, l + } + + function Vt(e, t, n, r) { + var o = Qt(e); + return o.__iterateUncached = function(o, i) { + var a = this; + if (i) return this.cacheResult().__iterate(o, i); + var u = !0, + s = 0; + return e.__iterate(function(e, i, l) { + if (!u || !(u = t.call(n, e, i, l))) return s++, o(e, r ? i : s - 1, a) + }), s + }, o.__iteratorUncached = function(o, i) { + var a = this; + if (i) return this.cacheResult().__iterator(o, i); + var u = e.__iterator(N, i), + s = !0, + l = 0; + return new U(function() { + var e, i, c; + do { + if ((e = u.next()).done) return r || o === j ? e : q(o, l++, o === I ? void 0 : e.value[1], e); + var f = e.value; + i = f[0], c = f[1], s && (s = t.call(n, c, i, a)) + } while (s); + return o === N ? e : q(o, i, c, e) + }) + }, o + } + + function Ht(e, t, n) { + var r = Qt(e); + return r.__iterateUncached = function(r, o) { + var i = 0, + u = !1; + return function e(s, l) { + var c = this; + s.__iterate(function(o, s) { + return (!t || l < t) && a(o) ? e(o, l + 1) : !1 === r(o, n ? s : i++, c) && (u = !0), !u + }, o) + }(e, 0), i + }, r.__iteratorUncached = function(r, o) { + var i = e.__iterator(r, o), + u = [], + s = 0; + return new U(function() { + for (; i;) { + var e = i.next(); + if (!1 === e.done) { + var l = e.value; + if (r === N && (l = l[1]), t && !(u.length < t) || !a(l)) return n ? e : q(r, s++, l, e); + u.push(i), i = l.__iterator(r, o) + } else i = u.pop() + } + return { + value: void 0, + done: !0 + } + }) + }, r + } + + function Wt(e, t, n) { + t || (t = tn); + var r = u(e), + o = 0, + i = e.toSeq().map(function(t, r) { + return [r, t, o++, n ? n(t, r, e) : t] + }).toArray(); + return i.sort(function(e, n) { + return t(e[3], n[3]) || e[2] - n[2] + }).forEach(r ? function(e, t) { + i[t].length = 2 + } : function(e, t) { + i[t] = e[1] + }), r ? Y(i) : s(e) ? K(i) : G(i) + } + + function Jt(e, t, n) { + if (t || (t = tn), n) { + var r = e.toSeq().map(function(t, r) { + return [t, n(t, r, e)] + }).reduce(function(e, n) { + return Yt(t, e[1], n[1]) ? 
n : e + }); + return r && r[0] + } + return e.reduce(function(e, n) { + return Yt(t, e, n) ? n : e + }) + } + + function Yt(e, t, n) { + var r = e(n, t); + return 0 === r && n !== t && (void 0 === n || null === n || n != n) || r > 0 + } + + function Kt(e, t, r) { + var o = Qt(e); + return o.size = new ee(r).map(function(e) { + return e.size + }).min(), o.__iterate = function(e, t) { + for (var n, r = this.__iterator(j, t), o = 0; !(n = r.next()).done && !1 !== e(n.value, o++, this);); + return o + }, o.__iteratorUncached = function(e, o) { + var i = r.map(function(e) { + return e = n(e), V(o ? e.reverse() : e) + }), + a = 0, + u = !1; + return new U(function() { + var n; + return u || (n = i.map(function(e) { + return e.next() + }), u = n.some(function(e) { + return e.done + })), u ? { + value: void 0, + done: !0 + } : q(e, a++, t.apply(null, n.map(function(e) { + return e.value + }))) + }) + }, o + } + + function Gt(e, t) { + return oe(e) ? t : e.constructor(t) + } + + function $t(e) { + if (e !== Object(e)) throw new TypeError("Expected [K, V] tuple: " + e) + } + + function Zt(e) { + return Le(e.size), C(e) + } + + function Xt(e) { + return u(e) ? r : s(e) ? o : i + } + + function Qt(e) { + return Object.create((u(e) ? Y : s(e) ? K : G).prototype) + } + + function en() { + return this._iter.cacheResult ? (this._iter.cacheResult(), this.size = this._iter.size, this) : J.prototype.cacheResult.call(this) + } + + function tn(e, t) { + return e > t ? 1 : e < t ? -1 : 0 + } + + function nn(e) { + var t = V(e); + if (!t) { + if (!W(e)) throw new TypeError("Expected iterable or array-like: " + e); + t = V(n(e)) + } + return t + } + + function rn(e, t) { + var n, r = function(i) { + if (i instanceof r) return i; + if (!(this instanceof r)) return new r(i); + if (!n) { + n = !0; + var a = Object.keys(e); + ! function(e, t) { + try { + t.forEach(function(e, t) { + Object.defineProperty(e, t, { + get: function() { + return this.get(t) + }, + set: function(e) { + ge(this.__ownerID, "Cannot set on an immutable record."), this.set(t, e) + } + }) + }.bind(void 0, e)) + } catch (e) {} + }(o, a), o.size = a.length, o._name = t, o._keys = a, o._defaultValues = e + } + this._map = Ue(i) + }, + o = r.prototype = Object.create(on); + return o.constructor = r, r + } + t(Pt, Ue), Pt.of = function() { + return this(arguments) + }, Pt.prototype.toString = function() { + return this.__toString("OrderedMap {", "}") + }, Pt.prototype.get = function(e, t) { + var n = this._map.get(e); + return void 0 !== n ? this._list.get(n)[1] : t + }, Pt.prototype.clear = function() { + return 0 === this.size ? this : this.__ownerID ? (this.size = 0, this._map.clear(), this._list.clear(), this) : It() + }, Pt.prototype.set = function(e, t) { + return jt(this, e, t) + }, Pt.prototype.remove = function(e) { + return jt(this, e, y) + }, Pt.prototype.wasAltered = function() { + return this._map.wasAltered() || this._list.wasAltered() + }, Pt.prototype.__iterate = function(e, t) { + var n = this; + return this._list.__iterate(function(t) { + return t && e(t[1], t[0], n) + }, t) + }, Pt.prototype.__iterator = function(e, t) { + return this._list.fromEntrySeq().__iterator(e, t) + }, Pt.prototype.__ensureOwner = function(e) { + if (e === this.__ownerID) return this; + var t = this._map.__ensureOwner(e), + n = this._list.__ensureOwner(e); + return e ? 
Mt(t, n, e, this.__hash) : (this.__ownerID = e, this._map = t, this._list = n, this) + }, Pt.isOrderedMap = Tt, Pt.prototype[h] = !0, Pt.prototype.delete = Pt.prototype.remove, t(Nt, Y), Nt.prototype.get = function(e, t) { + return this._iter.get(e, t) + }, Nt.prototype.has = function(e) { + return this._iter.has(e) + }, Nt.prototype.valueSeq = function() { + return this._iter.valueSeq() + }, Nt.prototype.reverse = function() { + var e = this, + t = Ft(this, !0); + return this._useKeys || (t.valueSeq = function() { + return e._iter.toSeq().reverse() + }), t + }, Nt.prototype.map = function(e, t) { + var n = this, + r = qt(this, e, t); + return this._useKeys || (r.valueSeq = function() { + return n._iter.toSeq().map(e, t) + }), r + }, Nt.prototype.__iterate = function(e, t) { + var n, r = this; + return this._iter.__iterate(this._useKeys ? function(t, n) { + return e(t, n, r) + } : (n = t ? Zt(this) : 0, function(o) { + return e(o, t ? --n : n++, r) + }), t) + }, Nt.prototype.__iterator = function(e, t) { + if (this._useKeys) return this._iter.__iterator(e, t); + var n = this._iter.__iterator(j, t), + r = t ? Zt(this) : 0; + return new U(function() { + var o = n.next(); + return o.done ? o : q(e, t ? --r : r++, o.value, o) + }) + }, Nt.prototype[h] = !0, t(Rt, K), Rt.prototype.includes = function(e) { + return this._iter.includes(e) + }, Rt.prototype.__iterate = function(e, t) { + var n = this, + r = 0; + return this._iter.__iterate(function(t) { + return e(t, r++, n) + }, t) + }, Rt.prototype.__iterator = function(e, t) { + var n = this._iter.__iterator(j, t), + r = 0; + return new U(function() { + var t = n.next(); + return t.done ? t : q(e, r++, t.value, t) + }) + }, t(Dt, G), Dt.prototype.has = function(e) { + return this._iter.includes(e) + }, Dt.prototype.__iterate = function(e, t) { + var n = this; + return this._iter.__iterate(function(t) { + return e(t, t, n) + }, t) + }, Dt.prototype.__iterator = function(e, t) { + var n = this._iter.__iterator(j, t); + return new U(function() { + var t = n.next(); + return t.done ? t : q(e, t.value, t.value, t) + }) + }, t(Lt, Y), Lt.prototype.entrySeq = function() { + return this._iter.toSeq() + }, Lt.prototype.__iterate = function(e, t) { + var n = this; + return this._iter.__iterate(function(t) { + if (t) { + $t(t); + var r = a(t); + return e(r ? t.get(1) : t[1], r ? t.get(0) : t[0], n) + } + }, t) + }, Lt.prototype.__iterator = function(e, t) { + var n = this._iter.__iterator(j, t); + return new U(function() { + for (;;) { + var t = n.next(); + if (t.done) return t; + var r = t.value; + if (r) { + $t(r); + var o = a(r); + return q(e, o ? r.get(0) : r[0], o ? r.get(1) : r[1], t) + } + } + }) + }, Rt.prototype.cacheResult = Nt.prototype.cacheResult = Dt.prototype.cacheResult = Lt.prototype.cacheResult = en, t(rn, _e), rn.prototype.toString = function() { + return this.__toString(un(this) + " {", "}") + }, rn.prototype.has = function(e) { + return this._defaultValues.hasOwnProperty(e) + }, rn.prototype.get = function(e, t) { + if (!this.has(e)) return t; + var n = this._defaultValues[e]; + return this._map ? 
this._map.get(e, n) : n + }, rn.prototype.clear = function() { + if (this.__ownerID) return this._map && this._map.clear(), this; + var e = this.constructor; + return e._empty || (e._empty = an(this, Xe())) + }, rn.prototype.set = function(e, t) { + if (!this.has(e)) throw new Error('Cannot set unknown key "' + e + '" on ' + un(this)); + if (this._map && !this._map.has(e) && t === this._defaultValues[e]) return this; + var n = this._map && this._map.set(e, t); + return this.__ownerID || n === this._map ? this : an(this, n) + }, rn.prototype.remove = function(e) { + if (!this.has(e)) return this; + var t = this._map && this._map.remove(e); + return this.__ownerID || t === this._map ? this : an(this, t) + }, rn.prototype.wasAltered = function() { + return this._map.wasAltered() + }, rn.prototype.__iterator = function(e, t) { + var n = this; + return r(this._defaultValues).map(function(e, t) { + return n.get(t) + }).__iterator(e, t) + }, rn.prototype.__iterate = function(e, t) { + var n = this; + return r(this._defaultValues).map(function(e, t) { + return n.get(t) + }).__iterate(e, t) + }, rn.prototype.__ensureOwner = function(e) { + if (e === this.__ownerID) return this; + var t = this._map && this._map.__ensureOwner(e); + return e ? an(this, t, e) : (this.__ownerID = e, this._map = t, this) + }; + var on = rn.prototype; + + function an(e, t, n) { + var r = Object.create(Object.getPrototypeOf(e)); + return r._map = t, r.__ownerID = n, r + } + + function un(e) { + return e._name || e.constructor.name || "Record" + } + + function sn(e) { + return null === e || void 0 === e ? vn() : ln(e) && !c(e) ? e : vn().withMutations(function(t) { + var n = i(e); + Le(n.size), n.forEach(function(e) { + return t.add(e) + }) + }) + } + + function ln(e) { + return !(!e || !e[fn]) + } + on.delete = on.remove, on.deleteIn = on.removeIn = Be.removeIn, on.merge = Be.merge, on.mergeWith = Be.mergeWith, on.mergeIn = Be.mergeIn, on.mergeDeep = Be.mergeDeep, on.mergeDeepWith = Be.mergeDeepWith, on.mergeDeepIn = Be.mergeDeepIn, on.setIn = Be.setIn, on.update = Be.update, on.updateIn = Be.updateIn, on.withMutations = Be.withMutations, on.asMutable = Be.asMutable, on.asImmutable = Be.asImmutable, t(sn, Ee), sn.of = function() { + return this(arguments) + }, sn.fromKeys = function(e) { + return this(r(e).keySeq()) + }, sn.prototype.toString = function() { + return this.__toString("Set {", "}") + }, sn.prototype.has = function(e) { + return this._map.has(e) + }, sn.prototype.add = function(e) { + return dn(this, this._map.set(e, !0)) + }, sn.prototype.remove = function(e) { + return dn(this, this._map.remove(e)) + }, sn.prototype.clear = function() { + return dn(this, this._map.clear()) + }, sn.prototype.union = function() { + var t = e.call(arguments, 0); + return 0 === (t = t.filter(function(e) { + return 0 !== e.size + })).length ? this : 0 !== this.size || this.__ownerID || 1 !== t.length ? 
this.withMutations(function(e) { + for (var n = 0; n < t.length; n++) i(t[n]).forEach(function(t) { + return e.add(t) + }) + }) : this.constructor(t[0]) + }, sn.prototype.intersect = function() { + var t = e.call(arguments, 0); + if (0 === t.length) return this; + t = t.map(function(e) { + return i(e) + }); + var n = this; + return this.withMutations(function(e) { + n.forEach(function(n) { + t.every(function(e) { + return e.includes(n) + }) || e.remove(n) + }) + }) + }, sn.prototype.subtract = function() { + var t = e.call(arguments, 0); + if (0 === t.length) return this; + t = t.map(function(e) { + return i(e) + }); + var n = this; + return this.withMutations(function(e) { + n.forEach(function(n) { + t.some(function(e) { + return e.includes(n) + }) && e.remove(n) + }) + }) + }, sn.prototype.merge = function() { + return this.union.apply(this, arguments) + }, sn.prototype.mergeWith = function(t) { + var n = e.call(arguments, 1); + return this.union.apply(this, n) + }, sn.prototype.sort = function(e) { + return mn(Wt(this, e)) + }, sn.prototype.sortBy = function(e, t) { + return mn(Wt(this, t, e)) + }, sn.prototype.wasAltered = function() { + return this._map.wasAltered() + }, sn.prototype.__iterate = function(e, t) { + var n = this; + return this._map.__iterate(function(t, r) { + return e(r, r, n) + }, t) + }, sn.prototype.__iterator = function(e, t) { + return this._map.map(function(e, t) { + return t + }).__iterator(e, t) + }, sn.prototype.__ensureOwner = function(e) { + if (e === this.__ownerID) return this; + var t = this._map.__ensureOwner(e); + return e ? this.__make(t, e) : (this.__ownerID = e, this._map = t, this) + }, sn.isSet = ln; + var cn, fn = "@@__IMMUTABLE_SET__@@", + pn = sn.prototype; + + function dn(e, t) { + return e.__ownerID ? (e.size = t.size, e._map = t, e) : t === e._map ? e : 0 === t.size ? e.__empty() : e.__make(t) + } + + function hn(e, t) { + var n = Object.create(pn); + return n.size = e ? e.size : 0, n._map = e, n.__ownerID = t, n + } + + function vn() { + return cn || (cn = hn(Xe())) + } + + function mn(e) { + return null === e || void 0 === e ? wn() : gn(e) ? e : wn().withMutations(function(t) { + var n = i(e); + Le(n.size), n.forEach(function(e) { + return t.add(e) + }) + }) + } + + function gn(e) { + return ln(e) && c(e) + } + pn[fn] = !0, pn.delete = pn.remove, pn.mergeDeep = pn.merge, pn.mergeDeepWith = pn.mergeWith, pn.withMutations = Be.withMutations, pn.asMutable = Be.asMutable, pn.asImmutable = Be.asImmutable, pn.__empty = vn, pn.__make = hn, t(mn, sn), mn.of = function() { + return this(arguments) + }, mn.fromKeys = function(e) { + return this(r(e).keySeq()) + }, mn.prototype.toString = function() { + return this.__toString("OrderedSet {", "}") + }, mn.isOrderedSet = gn; + var yn, bn = mn.prototype; + + function _n(e, t) { + var n = Object.create(bn); + return n.size = e ? e.size : 0, n._map = e, n.__ownerID = t, n + } + + function wn() { + return yn || (yn = _n(It())) + } + + function En(e) { + return null === e || void 0 === e ? On() : xn(e) ? e : On().unshiftAll(e) + } + + function xn(e) { + return !(!e || !e[Cn]) + } + bn[h] = !0, bn.__empty = wn, bn.__make = _n, t(En, we), En.of = function() { + return this(arguments) + }, En.prototype.toString = function() { + return this.__toString("Stack [", "]") + }, En.prototype.get = function(e, t) { + var n = this._head; + for (e = k(this, e); n && e--;) n = n.next; + return n ? 
n.value : t + }, En.prototype.peek = function() { + return this._head && this._head.value + }, En.prototype.push = function() { + if (0 === arguments.length) return this; + for (var e = this.size + arguments.length, t = this._head, n = arguments.length - 1; n >= 0; n--) t = { + value: arguments[n], + next: t + }; + return this.__ownerID ? (this.size = e, this._head = t, this.__hash = void 0, this.__altered = !0, this) : An(e, t) + }, En.prototype.pushAll = function(e) { + if (0 === (e = o(e)).size) return this; + Le(e.size); + var t = this.size, + n = this._head; + return e.reverse().forEach(function(e) { + t++, n = { + value: e, + next: n + } + }), this.__ownerID ? (this.size = t, this._head = n, this.__hash = void 0, this.__altered = !0, this) : An(t, n) + }, En.prototype.pop = function() { + return this.slice(1) + }, En.prototype.unshift = function() { + return this.push.apply(this, arguments) + }, En.prototype.unshiftAll = function(e) { + return this.pushAll(e) + }, En.prototype.shift = function() { + return this.pop.apply(this, arguments) + }, En.prototype.clear = function() { + return 0 === this.size ? this : this.__ownerID ? (this.size = 0, this._head = void 0, this.__hash = void 0, this.__altered = !0, this) : On() + }, En.prototype.slice = function(e, t) { + if (O(e, t, this.size)) return this; + var n = P(e, this.size); + if (T(t, this.size) !== this.size) return we.prototype.slice.call(this, e, t); + for (var r = this.size - n, o = this._head; n--;) o = o.next; + return this.__ownerID ? (this.size = r, this._head = o, this.__hash = void 0, this.__altered = !0, this) : An(r, o) + }, En.prototype.__ensureOwner = function(e) { + return e === this.__ownerID ? this : e ? An(this.size, this._head, e, this.__hash) : (this.__ownerID = e, this.__altered = !1, this) + }, En.prototype.__iterate = function(e, t) { + if (t) return this.reverse().__iterate(e); + for (var n = 0, r = this._head; r && !1 !== e(r.value, n++, this);) r = r.next; + return n + }, En.prototype.__iterator = function(e, t) { + if (t) return this.reverse().__iterator(e); + var n = 0, + r = this._head; + return new U(function() { + if (r) { + var t = r.value; + return r = r.next, q(e, n++, t) + } + return { + value: void 0, + done: !0 + } + }) + }, En.isStack = xn; + var Sn, Cn = "@@__IMMUTABLE_STACK__@@", + kn = En.prototype; + + function An(e, t, n, r) { + var o = Object.create(kn); + return o.size = e, o._head = t, o.__ownerID = n, o.__hash = r, o.__altered = !1, o + } + + function On() { + return Sn || (Sn = An(0)) + } + + function Pn(e, t) { + var n = function(n) { + e.prototype[n] = t[n] + }; + return Object.keys(t).forEach(n), Object.getOwnPropertySymbols && Object.getOwnPropertySymbols(t).forEach(n), e + } + kn[Cn] = !0, kn.withMutations = Be.withMutations, kn.asMutable = Be.asMutable, kn.asImmutable = Be.asImmutable, kn.wasAltered = Be.wasAltered, n.Iterator = U, Pn(n, { + toArray: function() { + Le(this.size); + var e = new Array(this.size || 0); + return this.valueSeq().__iterate(function(t, n) { + e[n] = t + }), e + }, + toIndexedSeq: function() { + return new Rt(this) + }, + toJS: function() { + return this.toSeq().map(function(e) { + return e && "function" == typeof e.toJS ? e.toJS() : e + }).__toJS() + }, + toJSON: function() { + return this.toSeq().map(function(e) { + return e && "function" == typeof e.toJSON ? 
e.toJSON() : e + }).__toJS() + }, + toKeyedSeq: function() { + return new Nt(this, !0) + }, + toMap: function() { + return Ue(this.toKeyedSeq()) + }, + toObject: function() { + Le(this.size); + var e = {}; + return this.__iterate(function(t, n) { + e[n] = t + }), e + }, + toOrderedMap: function() { + return Pt(this.toKeyedSeq()) + }, + toOrderedSet: function() { + return mn(u(this) ? this.valueSeq() : this) + }, + toSet: function() { + return sn(u(this) ? this.valueSeq() : this) + }, + toSetSeq: function() { + return new Dt(this) + }, + toSeq: function() { + return s(this) ? this.toIndexedSeq() : u(this) ? this.toKeyedSeq() : this.toSetSeq() + }, + toStack: function() { + return En(u(this) ? this.valueSeq() : this) + }, + toList: function() { + return pt(u(this) ? this.valueSeq() : this) + }, + toString: function() { + return "[Iterable]" + }, + __toString: function(e, t) { + return 0 === this.size ? e + t : e + " " + this.toSeq().map(this.__toStringMapper).join(", ") + " " + t + }, + concat: function() { + return Gt(this, function(e, t) { + var n = u(e), + o = [e].concat(t).map(function(e) { + return a(e) ? n && (e = r(e)) : e = n ? ae(e) : ue(Array.isArray(e) ? e : [e]), e + }).filter(function(e) { + return 0 !== e.size + }); + if (0 === o.length) return e; + if (1 === o.length) { + var i = o[0]; + if (i === e || n && u(i) || s(e) && s(i)) return i + } + var l = new ee(o); + return n ? l = l.toKeyedSeq() : s(e) || (l = l.toSetSeq()), (l = l.flatten(!0)).size = o.reduce(function(e, t) { + if (void 0 !== e) { + var n = t.size; + if (void 0 !== n) return e + n + } + }, 0), l + }(this, e.call(arguments, 0))) + }, + includes: function(e) { + return this.some(function(t) { + return he(t, e) + }) + }, + entries: function() { + return this.__iterator(N) + }, + every: function(e, t) { + Le(this.size); + var n = !0; + return this.__iterate(function(r, o, i) { + if (!e.call(t, r, o, i)) return n = !1, !1 + }), n + }, + filter: function(e, t) { + return Gt(this, zt(this, e, t, !0)) + }, + find: function(e, t, n) { + var r = this.findEntry(e, t); + return r ? r[1] : n + }, + forEach: function(e, t) { + return Le(this.size), this.__iterate(t ? e.bind(t) : e) + }, + join: function(e) { + Le(this.size), e = void 0 !== e ? "" + e : ","; + var t = "", + n = !0; + return this.__iterate(function(r) { + n ? n = !1 : t += e, t += null !== r && void 0 !== r ? r.toString() : "" + }), t + }, + keys: function() { + return this.__iterator(I) + }, + map: function(e, t) { + return Gt(this, qt(this, e, t)) + }, + reduce: function(e, t, n) { + var r, o; + return Le(this.size), arguments.length < 2 ? o = !0 : r = t, this.__iterate(function(t, i, a) { + o ? (o = !1, r = t) : r = e.call(n, r, t, i, a) + }), r + }, + reduceRight: function(e, t, n) { + var r = this.toKeyedSeq().reverse(); + return r.reduce.apply(r, arguments) + }, + reverse: function() { + return Gt(this, Ft(this, !0)) + }, + slice: function(e, t) { + return Gt(this, Bt(this, e, t, !0)) + }, + some: function(e, t) { + return !this.every(Nn(e), t) + }, + sort: function(e) { + return Gt(this, Wt(this, e)) + }, + values: function() { + return this.__iterator(j) + }, + butLast: function() { + return this.slice(0, -1) + }, + isEmpty: function() { + return void 0 !== this.size ? 0 === this.size : !this.some(function() { + return !0 + }) + }, + count: function(e, t) { + return C(e ? 
this.toSeq().filter(e, t) : this) + }, + countBy: function(e, t) { + return function(e, t, n) { + var r = Ue().asMutable(); + return e.__iterate(function(o, i) { + r.update(t.call(n, o, i, e), 0, function(e) { + return e + 1 + }) + }), r.asImmutable() + }(this, e, t) + }, + equals: function(e) { + return ve(this, e) + }, + entrySeq: function() { + var e = this; + if (e._cache) return new ee(e._cache); + var t = e.toSeq().map(jn).toIndexedSeq(); + return t.fromEntrySeq = function() { + return e.toSeq() + }, t + }, + filterNot: function(e, t) { + return this.filter(Nn(e), t) + }, + findEntry: function(e, t, n) { + var r = n; + return this.__iterate(function(n, o, i) { + if (e.call(t, n, o, i)) return r = [o, n], !1 + }), r + }, + findKey: function(e, t) { + var n = this.findEntry(e, t); + return n && n[0] + }, + findLast: function(e, t, n) { + return this.toKeyedSeq().reverse().find(e, t, n) + }, + findLastEntry: function(e, t, n) { + return this.toKeyedSeq().reverse().findEntry(e, t, n) + }, + findLastKey: function(e, t) { + return this.toKeyedSeq().reverse().findKey(e, t) + }, + first: function() { + return this.find(A) + }, + flatMap: function(e, t) { + return Gt(this, function(e, t, n) { + var r = Xt(e); + return e.toSeq().map(function(o, i) { + return r(t.call(n, o, i, e)) + }).flatten(!0) + }(this, e, t)) + }, + flatten: function(e) { + return Gt(this, Ht(this, e, !0)) + }, + fromEntrySeq: function() { + return new Lt(this) + }, + get: function(e, t) { + return this.find(function(t, n) { + return he(n, e) + }, void 0, t) + }, + getIn: function(e, t) { + for (var n, r = this, o = nn(e); !(n = o.next()).done;) { + var i = n.value; + if ((r = r && r.get ? r.get(i, y) : y) === y) return t + } + return r + }, + groupBy: function(e, t) { + return function(e, t, n) { + var r = u(e), + o = (c(e) ? Pt() : Ue()).asMutable(); + e.__iterate(function(i, a) { + o.update(t.call(n, i, a, e), function(e) { + return (e = e || []).push(r ? [a, i] : i), e + }) + }); + var i = Xt(e); + return o.map(function(t) { + return Gt(e, i(t)) + }) + }(this, e, t) + }, + has: function(e) { + return this.get(e, y) !== y + }, + hasIn: function(e) { + return this.getIn(e, y) !== y + }, + isSubset: function(e) { + return e = "function" == typeof e.includes ? e : n(e), this.every(function(t) { + return e.includes(t) + }) + }, + isSuperset: function(e) { + return (e = "function" == typeof e.isSubset ? e : n(e)).isSubset(this) + }, + keyOf: function(e) { + return this.findKey(function(t) { + return he(t, e) + }) + }, + keySeq: function() { + return this.toSeq().map(In).toIndexedSeq() + }, + last: function() { + return this.toSeq().reverse().first() + }, + lastKeyOf: function(e) { + return this.toKeyedSeq().reverse().keyOf(e) + }, + max: function(e) { + return Jt(this, e) + }, + maxBy: function(e, t) { + return Jt(this, t, e) + }, + min: function(e) { + return Jt(this, e ? Rn(e) : Un) + }, + minBy: function(e, t) { + return Jt(this, t ? 
Rn(t) : Un, e) + }, + rest: function() { + return this.slice(1) + }, + skip: function(e) { + return this.slice(Math.max(0, e)) + }, + skipLast: function(e) { + return Gt(this, this.toSeq().reverse().skip(e).reverse()) + }, + skipWhile: function(e, t) { + return Gt(this, Vt(this, e, t, !0)) + }, + skipUntil: function(e, t) { + return this.skipWhile(Nn(e), t) + }, + sortBy: function(e, t) { + return Gt(this, Wt(this, t, e)) + }, + take: function(e) { + return this.slice(0, Math.max(0, e)) + }, + takeLast: function(e) { + return Gt(this, this.toSeq().reverse().take(e).reverse()) + }, + takeWhile: function(e, t) { + return Gt(this, function(e, t, n) { + var r = Qt(e); + return r.__iterateUncached = function(r, o) { + var i = this; + if (o) return this.cacheResult().__iterate(r, o); + var a = 0; + return e.__iterate(function(e, o, u) { + return t.call(n, e, o, u) && ++a && r(e, o, i) + }), a + }, r.__iteratorUncached = function(r, o) { + var i = this; + if (o) return this.cacheResult().__iterator(r, o); + var a = e.__iterator(N, o), + u = !0; + return new U(function() { + if (!u) return { + value: void 0, + done: !0 + }; + var e = a.next(); + if (e.done) return e; + var o = e.value, + s = o[0], + l = o[1]; + return t.call(n, l, s, i) ? r === N ? e : q(r, s, l, e) : (u = !1, { + value: void 0, + done: !0 + }) + }) + }, r + }(this, e, t)) + }, + takeUntil: function(e, t) { + return this.takeWhile(Nn(e), t) + }, + valueSeq: function() { + return this.toIndexedSeq() + }, + hashCode: function() { + return this.__hash || (this.__hash = function(e) { + if (e.size === 1 / 0) return 0; + var t = c(e), + n = u(e), + r = t ? 1 : 0; + return function(e, t) { + return t = xe(t, 3432918353), t = xe(t << 15 | t >>> -15, 461845907), t = xe(t << 13 | t >>> -13, 5), t = xe((t = (t + 3864292196 | 0) ^ e) ^ t >>> 16, 2246822507), t = Se((t = xe(t ^ t >>> 13, 3266489909)) ^ t >>> 16) + }(e.__iterate(n ? t ? function(e, t) { + r = 31 * r + qn(Ce(e), Ce(t)) | 0 + } : function(e, t) { + r = r + qn(Ce(e), Ce(t)) | 0 + } : t ? function(e) { + r = 31 * r + Ce(e) | 0 + } : function(e) { + r = r + Ce(e) | 0 + }), r) + }(this)) + } + }); + var Tn = n.prototype; + Tn[f] = !0, Tn[L] = Tn.values, Tn.__toJS = Tn.toArray, Tn.__toStringMapper = Dn, Tn.inspect = Tn.toSource = function() { + return this.toString() + }, Tn.chain = Tn.flatMap, Tn.contains = Tn.includes, Pn(r, { + flip: function() { + return Gt(this, Ut(this)) + }, + mapEntries: function(e, t) { + var n = this, + r = 0; + return Gt(this, this.toSeq().map(function(o, i) { + return e.call(t, [i, o], r++, n) + }).fromEntrySeq()) + }, + mapKeys: function(e, t) { + var n = this; + return Gt(this, this.toSeq().flip().map(function(r, o) { + return e.call(t, r, o, n) + }).flip()) + } + }); + var Mn = r.prototype; + + function In(e, t) { + return t + } + + function jn(e, t) { + return [t, e] + } + + function Nn(e) { + return function() { + return !e.apply(this, arguments) + } + } + + function Rn(e) { + return function() { + return -e.apply(this, arguments) + } + } + + function Dn(e) { + return "string" == typeof e ? JSON.stringify(e) : String(e) + } + + function Ln() { + return S(arguments) + } + + function Un(e, t) { + return e < t ? 1 : e > t ? 
-1 : 0 + } + + function qn(e, t) { + return e ^ t + 2654435769 + (e << 6) + (e >> 2) | 0 + } + return Mn[p] = !0, Mn[L] = Tn.entries, Mn.__toJS = Tn.toObject, Mn.__toStringMapper = function(e, t) { + return JSON.stringify(t) + ": " + Dn(e) + }, Pn(o, { + toKeyedSeq: function() { + return new Nt(this, !1) + }, + filter: function(e, t) { + return Gt(this, zt(this, e, t, !1)) + }, + findIndex: function(e, t) { + var n = this.findEntry(e, t); + return n ? n[0] : -1 + }, + indexOf: function(e) { + var t = this.keyOf(e); + return void 0 === t ? -1 : t + }, + lastIndexOf: function(e) { + var t = this.lastKeyOf(e); + return void 0 === t ? -1 : t + }, + reverse: function() { + return Gt(this, Ft(this, !1)) + }, + slice: function(e, t) { + return Gt(this, Bt(this, e, t, !1)) + }, + splice: function(e, t) { + var n = arguments.length; + if (t = Math.max(0 | t, 0), 0 === n || 2 === n && !t) return this; + e = P(e, e < 0 ? this.count() : this.size); + var r = this.slice(0, e); + return Gt(this, 1 === n ? r : r.concat(S(arguments, 2), this.slice(e + t))) + }, + findLastIndex: function(e, t) { + var n = this.findLastEntry(e, t); + return n ? n[0] : -1 + }, + first: function() { + return this.get(0) + }, + flatten: function(e) { + return Gt(this, Ht(this, e, !1)) + }, + get: function(e, t) { + return (e = k(this, e)) < 0 || this.size === 1 / 0 || void 0 !== this.size && e > this.size ? t : this.find(function(t, n) { + return n === e + }, void 0, t) + }, + has: function(e) { + return (e = k(this, e)) >= 0 && (void 0 !== this.size ? this.size === 1 / 0 || e < this.size : -1 !== this.indexOf(e)) + }, + interpose: function(e) { + return Gt(this, function(e, t) { + var n = Qt(e); + return n.size = e.size && 2 * e.size - 1, n.__iterateUncached = function(n, r) { + var o = this, + i = 0; + return e.__iterate(function(e, r) { + return (!i || !1 !== n(t, i++, o)) && !1 !== n(e, i++, o) + }, r), i + }, n.__iteratorUncached = function(n, r) { + var o, i = e.__iterator(j, r), + a = 0; + return new U(function() { + return (!o || a % 2) && (o = i.next()).done ? o : a % 2 ? q(n, a++, t) : q(n, a++, o.value, o) + }) + }, n + }(this, e)) + }, + interleave: function() { + var e = [this].concat(S(arguments)), + t = Kt(this.toSeq(), K.of, e), + n = t.flatten(!0); + return t.size && (n.size = t.size * e.length), Gt(this, n) + }, + keySeq: function() { + return ye(0, this.size) + }, + last: function() { + return this.get(-1) + }, + skipWhile: function(e, t) { + return Gt(this, Vt(this, e, t, !1)) + }, + zip: function() { + return Gt(this, Kt(this, Ln, [this].concat(S(arguments)))) + }, + zipWith: function(e) { + var t = S(arguments); + return t[0] = this, Gt(this, Kt(this, e, t)) + } + }), o.prototype[d] = !0, o.prototype[h] = !0, Pn(i, { + get: function(e, t) { + return this.has(e) ? 
e : t + }, + includes: function(e) { + return this.has(e) + }, + keySeq: function() { + return this.valueSeq() + } + }), i.prototype.has = Tn.includes, i.prototype.contains = i.prototype.includes, Pn(Y, r.prototype), Pn(K, o.prototype), Pn(G, i.prototype), Pn(_e, r.prototype), Pn(we, o.prototype), Pn(Ee, i.prototype), { + Iterable: n, + Seq: J, + Collection: be, + Map: Ue, + OrderedMap: Pt, + List: pt, + Stack: En, + Set: sn, + OrderedSet: mn, + Record: rn, + Range: ye, + Repeat: me, + is: he, + fromJS: fe + } + }, e.exports = r() + }, function(e, t, n) { + "use strict"; + var r = function(e) {}; + e.exports = function(e, t, n, o, i, a, u, s) { + if (r(t), !e) { + var l; + if (void 0 === t) l = new Error("Minified exception occurred; use the non-minified dev environment for the full error message and additional helpful warnings."); + else { + var c = [n, o, i, a, u, s], + f = 0; + (l = new Error(t.replace(/%s/g, function() { + return c[f++] + }))).name = "Invariant Violation" + } + throw l.framesToPop = 1, l + } + } + }, function(e, t, n) { + "use strict"; + var r = n(34); + e.exports = r + }, function(e, t, n) { + "use strict"; + (function(e) { + Object.defineProperty(t, "__esModule", { + value: !0 + }), t.getCommonExtensions = t.getExtensions = t.escapeDeepLinkPath = t.createDeepLinkPath = t.shallowEqualKeys = t.buildFormData = t.sorters = t.btoa = t.serializeSearch = t.parseSearch = t.getSampleSchema = t.validateParam = t.validatePattern = t.validateMinLength = t.validateMaxLength = t.validateGuid = t.validateDateTime = t.validateString = t.validateBoolean = t.validateFile = t.validateInteger = t.validateNumber = t.validateMinimum = t.validateMaximum = t.propChecker = t.memoize = t.isImmutable = void 0; + var r = _(n(41)), + o = _(n(18)), + i = _(n(91)), + a = _(n(23)), + u = _(n(42)), + s = _(n(45)); + t.isJSONObject = function(e) { + try { + var t = JSON.parse(e); + if (t && "object" === (void 0 === t ? "undefined" : (0, s.default)(t))) return t + } catch (e) {} + return !1 + }, t.objectify = function(e) { + return S(e) ? E(e) ? e.toJS() : e : {} + }, t.arrayify = function(e) { + return e ? e.toArray ? e.toArray() : x(e) : [] + }, t.fromJSOrdered = function e(t) { + if (E(t)) return t; + if (t instanceof y.default.File) return t; + return S(t) ? Array.isArray(t) ? l.default.Seq(t).map(e).toList() : l.default.OrderedMap(t).map(e) : t + }, t.bindToState = function(e, t) { + var n = {}; + return (0, u.default)(e).filter(function(t) { + return "function" == typeof e[t] + }).forEach(function(r) { + return n[r] = e[r].bind(null, t) + }), n + }, t.normalizeArray = x, t.isFn = function(e) { + return "function" == typeof e + }, t.isObject = S, t.isFunc = function(e) { + return "function" == typeof e + }, t.isArray = function(e) { + return Array.isArray(e) + }, t.objMap = function(e, t) { + return (0, u.default)(e).reduce(function(n, r) { + return n[r] = t(e[r], r), n + }, {}) + }, t.objReduce = function(e, t) { + return (0, u.default)(e).reduce(function(n, r) { + var o = t(e[r], r); + return o && "object" === (void 0 === o ? "undefined" : (0, s.default)(o)) && (0, a.default)(n, o), n + }, {}) + }, t.systemThunkMiddleware = function(e) { + return function(t) { + t.dispatch, t.getState; + return function(t) { + return function(n) { + return "function" == typeof n ? n(e()) : t(n) + } + } + } + }, t.defaultStatusCode = function(e) { + var t = e.keySeq(); + return t.contains(w) ? 
w : t.filter(function(e) { + return "2" === (e + "")[0] + }).sort().first() + }, t.getList = function(e, t) { + if (!l.default.Iterable.isIterable(e)) return l.default.List(); + var n = e.getIn(Array.isArray(t) ? t : [t]); + return l.default.List.isList(n) ? n : l.default.List() + }, t.highlight = function(e) { + var t = document; + if (!e) return ""; + if (e.textContent.length > 5e3) return e.textContent; + return function(e) { + for (var n, r, o, i, a, u = e.textContent, s = 0, l = u[0], c = 1, f = e.innerHTML = "", p = 0; r = n, n = p < 7 && "\\" == n ? 1 : c;) { + if (c = l, l = u[++s], i = f.length > 1, !c || p > 8 && "\n" == c || [/\S/.test(c), 1, 1, !/[$\w]/.test(c), ("/" == n || "\n" == n) && i, '"' == n && i, "'" == n && i, u[s - 4] + r + n == "--\x3e", r + n == "*/"][p]) + for (f && (e.appendChild(a = t.createElement("span")).setAttribute("style", ["color: #555; font-weight: bold;", "", "", "color: #555;", ""][p ? p < 3 ? 2 : p > 6 ? 4 : p > 3 ? 3 : +/^(a(bstract|lias|nd|rguments|rray|s(m|sert)?|uto)|b(ase|egin|ool(ean)?|reak|yte)|c(ase|atch|har|hecked|lass|lone|ompl|onst|ontinue)|de(bugger|cimal|clare|f(ault|er)?|init|l(egate|ete)?)|do|double|e(cho|ls?if|lse(if)?|nd|nsure|num|vent|x(cept|ec|p(licit|ort)|te(nds|nsion|rn)))|f(allthrough|alse|inal(ly)?|ixed|loat|or(each)?|riend|rom|unc(tion)?)|global|goto|guard|i(f|mp(lements|licit|ort)|n(it|clude(_once)?|line|out|stanceof|t(erface|ernal)?)?|s)|l(ambda|et|ock|ong)|m(icrolight|odule|utable)|NaN|n(amespace|ative|ext|ew|il|ot|ull)|o(bject|perator|r|ut|verride)|p(ackage|arams|rivate|rotected|rotocol|ublic)|r(aise|e(adonly|do|f|gister|peat|quire(_once)?|scue|strict|try|turn))|s(byte|ealed|elf|hort|igned|izeof|tatic|tring|truct|ubscript|uper|ynchronized|witch)|t(emplate|hen|his|hrows?|ransient|rue|ry|ype(alias|def|id|name|of))|u(n(checked|def(ined)?|ion|less|signed|til)|se|sing)|v(ar|irtual|oid|olatile)|w(char_t|hen|here|hile|ith)|xor|yield)$/.test(f) : 0]), a.appendChild(t.createTextNode(f))), o = p && p < 7 ? p : o, f = "", p = 11; ![1, /[\/{}[(\-+*=<>:;|\\.,?!&@~]/.test(c), /[\])]/.test(c), /[$\w]/.test(c), "/" == c && o < 2 && "<" != n, '"' == c, "'" == c, c + l + u[s + 1] + u[s + 2] == "\x3c!--", c + l == "/*", c + l == "//", "#" == c][--p];); + f += c + } + }(e) + }, t.mapToList = function e(t) { + var n = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : "key"; + var r = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : l.default.Map(); + if (!l.default.Map.isMap(t) || !t.size) return l.default.List(); + Array.isArray(n) || (n = [n]); + if (n.length < 1) return t.merge(r); + var a = l.default.List(); + var u = n[0]; + var s = !0; + var c = !1; + var f = void 0; + try { + for (var p, d = (0, i.default)(t.entries()); !(s = (p = d.next()).done); s = !0) { + var h = p.value, + v = (0, o.default)(h, 2), + m = v[0], + g = v[1], + y = e(g, n.slice(1), r.set(u, m)); + a = l.default.List.isList(y) ? 
a.concat(y) : a.push(y) + } + } catch (e) { + c = !0, f = e + } finally { + try { + !s && d.return && d.return() + } finally { + if (c) throw f + } + } + return a + }, t.extractFileNameFromContentDispositionHeader = function(e) { + var t = /filename="([^;]*);?"/i.exec(e); + null === t && (t = /filename=([^;]*);?/i.exec(e)); + if (null !== t && t.length > 1) return t[1]; + return null + }, t.pascalCase = C, t.pascalCaseFilename = function(e) { + return C(e.replace(/\.[^./]*$/, "")) + }, t.sanitizeUrl = function(e) { + if ("string" != typeof e || "" === e) return ""; + return (0, c.sanitizeUrl)(e) + }, t.getAcceptControllingResponse = function(e) { + if (!l.default.OrderedMap.isOrderedMap(e)) return null; + if (!e.size) return null; + var t = e.find(function(e, t) { + return t.startsWith("2") && (0, u.default)(e.get("content") || {}).length > 0 + }), + n = e.get("default") || l.default.OrderedMap(), + r = (n.get("content") || l.default.OrderedMap()).keySeq().toJS().length ? n : null; + return t || r + }, t.deeplyStripKey = function e(t, n) { + var r = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : function() { + return !0 + }; + if ("object" !== (void 0 === t ? "undefined" : (0, s.default)(t)) || Array.isArray(t) || null === t || !n) return t; + var o = (0, a.default)({}, t); + (0, u.default)(o).forEach(function(t) { + t === n && r(o[t], t) ? delete o[t] : o[t] = e(o[t], n, r) + }); + return o + }, t.stringify = function(e) { + if ("string" == typeof e) return e; + e.toJS && (e = e.toJS()); + if ("object" === (void 0 === e ? "undefined" : (0, s.default)(e)) && null !== e) try { + return (0, r.default)(e, null, 2) + } catch (t) { + return String(e) + } + return e.toString() + }, t.numberToString = function(e) { + if ("number" == typeof e) return e.toString(); + return e + }; + var l = _(n(7)), + c = n(571), + f = _(n(572)), + p = _(n(280)), + d = _(n(284)), + h = _(n(287)), + v = _(n(650)), + m = _(n(105)), + g = n(192), + y = _(n(32)), + b = _(n(723)); + + function _(e) { + return e && e.__esModule ? e : { + default: e + } + } + var w = "default", + E = t.isImmutable = function(e) { + return l.default.Iterable.isIterable(e) + }; + + function x(e) { + return Array.isArray(e) ? e : [e] + } + + function S(e) { + return !!e && "object" === (void 0 === e ? "undefined" : (0, s.default)(e)) + } + t.memoize = d.default; + + function C(e) { + return (0, p.default)((0, f.default)(e)) + } + t.propChecker = function(e, t) { + var n = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : [], + r = arguments.length > 3 && void 0 !== arguments[3] ? arguments[3] : []; + return (0, u.default)(e).length !== (0, u.default)(t).length || ((0, v.default)(e, function(e, n) { + if (r.includes(n)) return !1; + var o = t[n]; + return l.default.Iterable.isIterable(e) ? !l.default.is(e, o) : ("object" !== (void 0 === e ? "undefined" : (0, s.default)(e)) || "object" !== (void 0 === o ? 
"undefined" : (0, s.default)(o))) && e !== o + }) || n.some(function(n) { + return !(0, m.default)(e[n], t[n]) + })) + }; + var k = t.validateMaximum = function(e, t) { + if (e > t) return "Value must be less than Maximum" + }, + A = t.validateMinimum = function(e, t) { + if (e < t) return "Value must be greater than Minimum" + }, + O = t.validateNumber = function(e) { + if (!/^-?\d+(\.?\d+)?$/.test(e)) return "Value must be a number" + }, + P = t.validateInteger = function(e) { + if (!/^-?\d+$/.test(e)) return "Value must be an integer" + }, + T = t.validateFile = function(e) { + if (e && !(e instanceof y.default.File)) return "Value must be a file" + }, + M = t.validateBoolean = function(e) { + if ("true" !== e && "false" !== e && !0 !== e && !1 !== e) return "Value must be a boolean" + }, + I = t.validateString = function(e) { + if (e && "string" != typeof e) return "Value must be a string" + }, + j = t.validateDateTime = function(e) { + if (isNaN(Date.parse(e))) return "Value must be a DateTime" + }, + N = t.validateGuid = function(e) { + if (e = e.toString().toLowerCase(), !/^[{(]?[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}[)}]?$/.test(e)) return "Value must be a Guid" + }, + R = t.validateMaxLength = function(e, t) { + if (e.length > t) return "Value must be less than MaxLength" + }, + D = t.validateMinLength = function(e, t) { + if (e.length < t) return "Value must be greater than MinLength" + }, + L = t.validatePattern = function(e, t) { + if (!new RegExp(t).test(e)) return "Value must follow pattern " + t + }; + t.validateParam = function(e, t) { + var n = arguments.length > 2 && void 0 !== arguments[2] && arguments[2], + r = [], + o = t && "body" === e.get("in") ? e.get("value_xml") : e.get("value"), + i = e.get("required"), + a = n ? e.get("schema") : e; + if (!a) return r; + var u = a.get("maximum"), + c = a.get("minimum"), + f = a.get("type"), + p = a.get("format"), + d = a.get("maxLength"), + h = a.get("minLength"), + v = a.get("pattern"); + if (f && (i || o)) { + var m = "string" === f && o, + g = "array" === f && Array.isArray(o) && o.length, + b = "array" === f && l.default.List.isList(o) && o.count(), + _ = "file" === f && o instanceof y.default.File, + w = "boolean" === f && (o || !1 === o), + E = "number" === f && (o || 0 === o), + x = "integer" === f && (o || 0 === o), + S = !1; + if (n && "object" === f) + if ("object" === (void 0 === o ? "undefined" : (0, s.default)(o))) S = !0; + else if ("string" == typeof o) try { + JSON.parse(o), S = !0 + } catch (e) { + return r.push("Parameter string value must be valid JSON"), r + } + var C = [m, g, b, _, w, E, x, S].some(function(e) { + return !!e + }); + if (i && !C) return r.push("Required field is not provided"), r; + if (v) { + var U = L(o, v); + U && r.push(U) + } + if (d || 0 === d) { + var q = R(o, d); + q && r.push(q) + } + if (h) { + var F = D(o, h); + F && r.push(F) + } + if (u || 0 === u) { + var z = k(o, u); + z && r.push(z) + } + if (c || 0 === c) { + var B = A(o, c); + B && r.push(B) + } + if ("string" === f) { + var V = void 0; + if (!(V = "date-time" === p ? j(o) : "uuid" === p ? 
N(o) : I(o))) return r; + r.push(V) + } else if ("boolean" === f) { + var H = M(o); + if (!H) return r; + r.push(H) + } else if ("number" === f) { + var W = O(o); + if (!W) return r; + r.push(W) + } else if ("integer" === f) { + var J = P(o); + if (!J) return r; + r.push(J) + } else if ("array" === f) { + var Y; + if (!b || !o.count()) return r; + Y = a.getIn(["items", "type"]), o.forEach(function(e, t) { + var n = void 0; + "number" === Y ? n = O(e) : "integer" === Y ? n = P(e) : "string" === Y && (n = I(e)), n && r.push({ + index: t, + error: n + }) + }) + } else if ("file" === f) { + var K = T(o); + if (!K) return r; + r.push(K) + } + } + return r + }, t.getSampleSchema = function(e) { + var t = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : "", + n = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : {}; + if (/xml/.test(t)) { + if (!e.xml || !e.xml.name) { + if (e.xml = e.xml || {}, !e.$$ref) return e.type || e.items || e.properties || e.additionalProperties ? '\n\x3c!-- XML example cannot be generated --\x3e' : null; + var o = e.$$ref.match(/\S*\/(\S+)$/); + e.xml.name = o[1] + } + return (0, g.memoizedCreateXMLExample)(e, n) + } + var i = (0, g.memoizedSampleFromSchema)(e, n); + return "object" === (void 0 === i ? "undefined" : (0, s.default)(i)) ? (0, r.default)(i, null, 2) : i + }, t.parseSearch = function() { + var e = {}, + t = y.default.location.search; + if (!t) return {}; + if ("" != t) { + var n = t.substr(1).split("&"); + for (var r in n) n.hasOwnProperty(r) && (r = n[r].split("="), e[decodeURIComponent(r[0])] = r[1] && decodeURIComponent(r[1]) || "") + } + return e + }, t.serializeSearch = function(e) { + return (0, u.default)(e).map(function(t) { + return encodeURIComponent(t) + "=" + encodeURIComponent(e[t]) + }).join("&") + }, t.btoa = function(t) { + return (t instanceof e ? t : new e(t.toString(), "utf-8")).toString("base64") + }, t.sorters = { + operationsSorter: { + alpha: function(e, t) { + return e.get("path").localeCompare(t.get("path")) + }, + method: function(e, t) { + return e.get("method").localeCompare(t.get("method")) + } + }, + tagsSorter: { + alpha: function(e, t) { + return e.localeCompare(t) + } + } + }, t.buildFormData = function(e) { + var t = []; + for (var n in e) { + var r = e[n]; + void 0 !== r && "" !== r && t.push([n, "=", encodeURIComponent(r).replace(/%20/g, "+")].join("")) + } + return t.join("&") + }, t.shallowEqualKeys = function(e, t, n) { + return !!(0, h.default)(n, function(n) { + return (0, m.default)(e[n], t[n]) + }) + }; + var U = t.createDeepLinkPath = function(e) { + return "string" == typeof e || e instanceof String ? 
e.trim().replace(/\s/g, "_") : "" + }; + t.escapeDeepLinkPath = function(e) { + return (0, b.default)(U(e)) + }, t.getExtensions = function(e) { + return e.filter(function(e, t) { + return /^x-/.test(t) + }) + }, t.getCommonExtensions = function(e) { + return e.filter(function(e, t) { + return /^pattern|maxLength|minLength|maximum|minimum/.test(t) + }) + } + }).call(t, n(54).Buffer) + }, function(e, t, n) { + "use strict"; + e.exports = function(e) { + for (var t = arguments.length - 1, n = "Minified React error #" + e + "; visit http://facebook.github.io/react/docs/error-decoder.html?invariant=" + e, r = 0; r < t; r++) n += "&args[]=" + encodeURIComponent(arguments[r + 1]); + n += " for the full message or use the non-minified dev environment for full errors and additional helpful warnings."; + var o = new Error(n); + throw o.name = "Invariant Violation", o.framesToPop = 1, o + } + }, function(e, t, n) { + "use strict"; + var r = n(7), + o = "<>", + i = { + listOf: function(e) { + return l(e, "List", r.List.isList) + }, + mapOf: function(e, t) { + return c(e, t, "Map", r.Map.isMap) + }, + orderedMapOf: function(e, t) { + return c(e, t, "OrderedMap", r.OrderedMap.isOrderedMap) + }, + setOf: function(e) { + return l(e, "Set", r.Set.isSet) + }, + orderedSetOf: function(e) { + return l(e, "OrderedSet", r.OrderedSet.isOrderedSet) + }, + stackOf: function(e) { + return l(e, "Stack", r.Stack.isStack) + }, + iterableOf: function(e) { + return l(e, "Iterable", r.Iterable.isIterable) + }, + recordOf: function(e) { + return u(function(t, n, o, i, u) { + for (var s = arguments.length, l = Array(s > 5 ? s - 5 : 0), c = 5; c < s; c++) l[c - 5] = arguments[c]; + var f = t[n]; + if (!(f instanceof r.Record)) { + var p = a(f), + d = i; + return new Error("Invalid " + d + " `" + u + "` of type `" + p + "` supplied to `" + o + "`, expected an Immutable.js Record.") + } + for (var h in e) { + var v = e[h]; + if (v) { + var m = f.toObject(), + g = v.apply(void 0, [m, h, o, i, u + "." + h].concat(l)); + if (g) return g + } + } + }) + }, + shape: p, + contains: p, + mapContains: function(e) { + return f(e, "Map", r.Map.isMap) + }, + list: s("List", r.List.isList), + map: s("Map", r.Map.isMap), + orderedMap: s("OrderedMap", r.OrderedMap.isOrderedMap), + set: s("Set", r.Set.isSet), + orderedSet: s("OrderedSet", r.OrderedSet.isOrderedSet), + stack: s("Stack", r.Stack.isStack), + seq: s("Seq", r.Seq.isSeq), + record: s("Record", function(e) { + return e instanceof r.Record + }), + iterable: s("Iterable", r.Iterable.isIterable) + }; + + function a(e) { + var t = typeof e; + return Array.isArray(e) ? "array" : e instanceof RegExp ? "object" : e instanceof r.Iterable ? "Immutable." + e.toSource().split(" ")[0] : t + } + + function u(e) { + function t(t, n, r, i, a, u) { + for (var s = arguments.length, l = Array(s > 6 ? s - 6 : 0), c = 6; c < s; c++) l[c - 6] = arguments[c]; + return u = u || r, i = i || o, null != n[r] ? e.apply(void 0, [n, r, i, a, u].concat(l)) : t ? new Error("Required " + a + " `" + u + "` was not specified in `" + i + "`.") : void 0 + } + var n = t.bind(null, !1); + return n.isRequired = t.bind(null, !0), n + } + + function s(e, t) { + return u(function(n, r, o, i, u) { + var s = n[r]; + if (!t(s)) { + var l = a(s); + return new Error("Invalid " + i + " `" + u + "` of type `" + l + "` supplied to `" + o + "`, expected `" + e + "`.") + } + return null + }) + } + + function l(e, t, n) { + return u(function(r, o, i, u, s) { + for (var l = arguments.length, c = Array(l > 5 ? 
l - 5 : 0), f = 5; f < l; f++) c[f - 5] = arguments[f]; + var p = r[o]; + if (!n(p)) { + var d = u, + h = a(p); + return new Error("Invalid " + d + " `" + s + "` of type `" + h + "` supplied to `" + i + "`, expected an Immutable.js " + t + ".") + } + if ("function" != typeof e) return new Error("Invalid typeChecker supplied to `" + i + "` for propType `" + s + "`, expected a function."); + for (var v = p.toArray(), m = 0, g = v.length; m < g; m++) { + var y = e.apply(void 0, [v, m, i, u, s + "[" + m + "]"].concat(c)); + if (y instanceof Error) return y + } + }) + } + + function c(e, t, n, r) { + return u(function() { + for (var o = arguments.length, i = Array(o), a = 0; a < o; a++) i[a] = arguments[a]; + return l(e, n, r).apply(void 0, i) || t && (s = t, u(function(e, t, n, r, o) { + for (var i = arguments.length, a = Array(i > 5 ? i - 5 : 0), u = 5; u < i; u++) a[u - 5] = arguments[u]; + var l = e[t]; + if ("function" != typeof s) return new Error("Invalid keysTypeChecker (optional second argument) supplied to `" + n + "` for propType `" + o + "`, expected a function."); + for (var c = l.keySeq().toArray(), f = 0, p = c.length; f < p; f++) { + var d = s.apply(void 0, [c, f, n, r, o + " -> key(" + c[f] + ")"].concat(a)); + if (d instanceof Error) return d + } + })).apply(void 0, i); + var s + }) + } + + function f(e) { + var t = void 0 === arguments[1] ? "Iterable" : arguments[1], + n = void 0 === arguments[2] ? r.Iterable.isIterable : arguments[2]; + return u(function(r, o, i, u, s) { + for (var l = arguments.length, c = Array(l > 5 ? l - 5 : 0), f = 5; f < l; f++) c[f - 5] = arguments[f]; + var p = r[o]; + if (!n(p)) { + var d = a(p); + return new Error("Invalid " + u + " `" + s + "` of type `" + d + "` supplied to `" + i + "`, expected an Immutable.js " + t + ".") + } + var h = p.toObject(); + for (var v in e) { + var m = e[v]; + if (m) { + var g = m.apply(void 0, [h, v, i, u, s + "." + v].concat(c)); + if (g) return g + } + } + }) + } + + function p(e) { + return f(e) + } + e.exports = i + }, function(e, t, n) { + "use strict"; + /* + object-assign + (c) Sindre Sorhus + @license MIT + */ + var r = Object.getOwnPropertySymbols, + o = Object.prototype.hasOwnProperty, + i = Object.prototype.propertyIsEnumerable; + e.exports = function() { + try { + if (!Object.assign) return !1; + var e = new String("abc"); + if (e[5] = "de", "5" === Object.getOwnPropertyNames(e)[0]) return !1; + for (var t = {}, n = 0; n < 10; n++) t["_" + String.fromCharCode(n)] = n; + if ("0123456789" !== Object.getOwnPropertyNames(t).map(function(e) { + return t[e] + }).join("")) return !1; + var r = {}; + return "abcdefghijklmnopqrst".split("").forEach(function(e) { + r[e] = e + }), "abcdefghijklmnopqrst" === Object.keys(Object.assign({}, r)).join("") + } catch (e) { + return !1 + } + }() ? 
Object.assign : function(e, t) { + for (var n, a, u = function(e) { + if (null === e || void 0 === e) throw new TypeError("Object.assign cannot be called with null or undefined"); + return Object(e) + }(e), s = 1; s < arguments.length; s++) { + for (var l in n = Object(arguments[s])) o.call(n, l) && (u[l] = n[l]); + if (r) { + a = r(n); + for (var c = 0; c < a.length; c++) i.call(n, a[c]) && (u[a[c]] = n[a[c]]) + } + } + return u + } + }, function(e, t, n) { + "use strict"; + var r = n(11), + o = n(87), + i = n(350), + a = (n(8), o.ID_ATTRIBUTE_NAME), + u = i, + s = "__reactInternalInstance$" + Math.random().toString(36).slice(2); + + function l(e, t) { + return 1 === e.nodeType && e.getAttribute(a) === String(t) || 8 === e.nodeType && e.nodeValue === " react-text: " + t + " " || 8 === e.nodeType && e.nodeValue === " react-empty: " + t + " " + } + + function c(e) { + for (var t; t = e._renderedComponent;) e = t; + return e + } + + function f(e, t) { + var n = c(e); + n._hostNode = t, t[s] = n + } + + function p(e, t) { + if (!(e._flags & u.hasCachedChildNodes)) { + var n = e._renderedChildren, + o = t.firstChild; + e: for (var i in n) + if (n.hasOwnProperty(i)) { + var a = n[i], + s = c(a)._domID; + if (0 !== s) { + for (; null !== o; o = o.nextSibling) + if (l(o, s)) { + f(a, o); + continue e + } + r("32", s) + } + } + e._flags |= u.hasCachedChildNodes + } + } + + function d(e) { + if (e[s]) return e[s]; + for (var t, n, r = []; !e[s];) { + if (r.push(e), !e.parentNode) return null; + e = e.parentNode + } + for (; e && (n = e[s]); e = r.pop()) t = n, r.length && p(n, e); + return t + } + var h = { + getClosestInstanceFromNode: d, + getInstanceFromNode: function(e) { + var t = d(e); + return null != t && t._hostNode === e ? t : null + }, + getNodeFromInstance: function(e) { + if (void 0 === e._hostNode && r("33"), e._hostNode) return e._hostNode; + for (var t = []; !e._hostNode;) t.push(e), e._hostParent || r("34"), e = e._hostParent; + for (; t.length; e = t.pop()) p(e, e._hostNode); + return e._hostNode + }, + precacheChildNodes: p, + precacheNode: f, + uncacheNode: function(e) { + var t = e._hostNode; + t && (delete t[s], e._hostNode = null) + } + }; + e.exports = h + }, function(e, t) { + var n = e.exports = { + version: "2.5.5" + }; + "number" == typeof __e && (__e = n) + }, function(e, t, n) { + "use strict"; + var r = n(107), + o = ["kind", "resolve", "construct", "instanceOf", "predicate", "represent", "defaultStyle", "styleAliases"], + i = ["scalar", "sequence", "mapping"]; + e.exports = function(e, t) { + var n, a; + if (t = t || {}, Object.keys(t).forEach(function(t) { + if (-1 === o.indexOf(t)) throw new r('Unknown option "' + t + '" is met in definition of "' + e + '" YAML type.') + }), this.tag = e, this.kind = t.kind || null, this.resolve = t.resolve || function() { + return !0 + }, this.construct = t.construct || function(e) { + return e + }, this.instanceOf = t.instanceOf || null, this.predicate = t.predicate || null, this.represent = t.represent || null, this.defaultStyle = t.defaultStyle || null, this.styleAliases = (n = t.styleAliases || null, a = {}, null !== n && Object.keys(n).forEach(function(e) { + n[e].forEach(function(t) { + a[String(t)] = e + }) + }), a), -1 === i.indexOf(this.kind)) throw new r('Unknown kind "' + this.kind + '" is specified for "' + e + '" YAML type.') + } + }, function(e, t, n) { + var r = n(242)("wks"), + o = n(167), + i = n(33).Symbol, + a = "function" == typeof i; + (e.exports = function(e) { + return r[e] || (r[e] = a && i[e] || (a ? 
i : o)("Symbol." + e)) + }).store = r + }, function(e, t, n) { + "use strict"; + t.__esModule = !0; + var r = i(n(568)), + o = i(n(91)); + + function i(e) { + return e && e.__esModule ? e : { + default: e + } + } + t.default = function() { + return function(e, t) { + if (Array.isArray(e)) return e; + if ((0, r.default)(Object(e))) return function(e, t) { + var n = [], + r = !0, + i = !1, + a = void 0; + try { + for (var u, s = (0, o.default)(e); !(r = (u = s.next()).done) && (n.push(u.value), !t || n.length !== t); r = !0); + } catch (e) { + i = !0, a = e + } finally { + try { + !r && s.return && s.return() + } finally { + if (i) throw a + } + } + return n + }(e, t); + throw new TypeError("Invalid attempt to destructure non-iterable instance") + } + }() + }, function(e, t, n) { + var r = n(162)("wks"), + o = n(116), + i = n(21).Symbol, + a = "function" == typeof i; + (e.exports = function(e) { + return r[e] || (r[e] = a && i[e] || (a ? i : o)("Symbol." + e)) + }).store = r + }, function(e, t, n) { + var r = n(21), + o = n(15), + i = n(49), + a = n(50), + u = n(52), + s = function(e, t, n) { + var l, c, f, p = e & s.F, + d = e & s.G, + h = e & s.S, + v = e & s.P, + m = e & s.B, + g = e & s.W, + y = d ? o : o[t] || (o[t] = {}), + b = y.prototype, + _ = d ? r : h ? r[t] : (r[t] || {}).prototype; + for (l in d && (n = t), n)(c = !p && _ && void 0 !== _[l]) && u(y, l) || (f = c ? _[l] : n[l], y[l] = d && "function" != typeof _[l] ? n[l] : m && c ? i(f, r) : g && _[l] == f ? function(e) { + var t = function(t, n, r) { + if (this instanceof e) { + switch (arguments.length) { + case 0: + return new e; + case 1: + return new e(t); + case 2: + return new e(t, n) + } + return new e(t, n, r) + } + return e.apply(this, arguments) + }; + return t.prototype = e.prototype, t + }(f) : v && "function" == typeof f ? i(Function.call, f) : f, v && ((y.virtual || (y.virtual = {}))[l] = f, e & s.R && b && !b[l] && a(b, l, f))) + }; + s.F = 1, s.G = 2, s.S = 4, s.P = 8, s.B = 16, s.W = 32, s.U = 64, s.R = 128, e.exports = s + }, function(e, t) { + var n = e.exports = "undefined" != typeof window && window.Math == Math ? window : "undefined" != typeof self && self.Math == Math ? self : Function("return this")(); + "number" == typeof __g && (__g = n) + }, function(e, t, n) { + "use strict"; + t.__esModule = !0; + var r, o = n(262), + i = (r = o) && r.__esModule ? r : { + default: r + }; + t.default = function(e, t, n) { + return t in e ? (0, i.default)(e, t, { + value: n, + enumerable: !0, + configurable: !0, + writable: !0 + }) : e[t] = n, e + } + }, function(e, t, n) { + e.exports = { + default: n(533), + __esModule: !0 + } + }, function(e, t) { + var n = Array.isArray; + e.exports = n + }, function(e, t, n) { + "use strict"; + t.__esModule = !0; + var r, o = n(23), + i = (r = o) && r.__esModule ? 
r : { + default: r + }; + t.default = i.default || function(e) { + for (var t = 1; t < arguments.length; t++) { + var n = arguments[t]; + for (var r in n) Object.prototype.hasOwnProperty.call(n, r) && (e[r] = n[r]) + } + return e + } + }, function(e, t, n) { + "use strict"; + var r = !("undefined" == typeof window || !window.document || !window.document.createElement), + o = { + canUseDOM: r, + canUseWorkers: "undefined" != typeof Worker, + canUseEventListeners: r && !(!window.addEventListener && !window.attachEvent), + canUseViewport: r && !!window.screen, + isInWorker: !r + }; + e.exports = o + }, function(e, t, n) { + "use strict"; + var r = Object.prototype.hasOwnProperty; + + function o(e, t) { + return !!e && r.call(e, t) + } + var i = /\\([\\!"#$%&'()*+,.\/:;<=>?@[\]^_`{|}~-])/g; + + function a(e) { + return !(e >= 55296 && e <= 57343) && (!(e >= 64976 && e <= 65007) && (65535 != (65535 & e) && 65534 != (65535 & e) && (!(e >= 0 && e <= 8) && (11 !== e && (!(e >= 14 && e <= 31) && (!(e >= 127 && e <= 159) && !(e > 1114111))))))) + } + + function u(e) { + if (e > 65535) { + var t = 55296 + ((e -= 65536) >> 10), + n = 56320 + (1023 & e); + return String.fromCharCode(t, n) + } + return String.fromCharCode(e) + } + var s = /&([a-z#][a-z0-9]{1,31});/gi, + l = /^#((?:x[a-f0-9]{1,8}|[0-9]{1,8}))/i, + c = n(416); + + function f(e, t) { + var n = 0; + return o(c, t) ? c[t] : 35 === t.charCodeAt(0) && l.test(t) && a(n = "x" === t[1].toLowerCase() ? parseInt(t.slice(2), 16) : parseInt(t.slice(1), 10)) ? u(n) : e + } + var p = /[&<>"]/, + d = /[&<>"]/g, + h = { + "&": "&", + "<": "<", + ">": ">", + '"': """ + }; + + function v(e) { + return h[e] + } + t.assign = function(e) { + return [].slice.call(arguments, 1).forEach(function(t) { + if (t) { + if ("object" != typeof t) throw new TypeError(t + "must be object"); + Object.keys(t).forEach(function(n) { + e[n] = t[n] + }) + } + }), e + }, t.isString = function(e) { + return "[object String]" === function(e) { + return Object.prototype.toString.call(e) + }(e) + }, t.has = o, t.unescapeMd = function(e) { + return e.indexOf("\\") < 0 ? e : e.replace(i, "$1") + }, t.isValidEntityCode = a, t.fromCodePoint = u, t.replaceEntities = function(e) { + return e.indexOf("&") < 0 ? e : e.replace(s, f) + }, t.escapeHtml = function(e) { + return p.test(e) ? e.replace(d, v) : e + } + }, function(e, t) { + e.exports = function(e) { + return "object" == typeof e ? null !== e : "function" == typeof e + } + }, function(e, t, n) { + var r = n(33), + o = n(60), + i = n(58), + a = n(73), + u = n(120), + s = function(e, t, n) { + var l, c, f, p, d = e & s.F, + h = e & s.G, + v = e & s.S, + m = e & s.P, + g = e & s.B, + y = h ? r : v ? r[t] || (r[t] = {}) : (r[t] || {}).prototype, + b = h ? o : o[t] || (o[t] = {}), + _ = b.prototype || (b.prototype = {}); + for (l in h && (n = t), n) f = ((c = !d && y && void 0 !== y[l]) ? y : n)[l], p = g && c ? u(f, r) : m && "function" == typeof f ? 
u(Function.call, f) : f, y && a(y, l, f, e & s.U), b[l] != f && i(b, l, p), m && _[l] != f && (_[l] = f) + }; + r.core = o, s.F = 1, s.G = 2, s.S = 4, s.P = 8, s.B = 16, s.W = 32, s.U = 64, s.R = 128, e.exports = s + }, function(e, t, n) { + var r = n(29), + o = n(101), + i = n(53), + a = /"/g, + u = function(e, t, n, r) { + var o = String(i(e)), + u = "<" + t; + return "" !== n && (u += " " + n + '="' + String(r).replace(a, """) + '"'), u + ">" + o + "" + }; + e.exports = function(e, t) { + var n = {}; + n[e] = t(u), r(r.P + r.F * o(function() { + var t = "" [e]('"'); + return t !== t.toLowerCase() || t.split('"').length > 3 + }), "String", n) + } + }, function(e, t) { + var n; + n = function() { + return this + }(); + try { + n = n || Function("return this")() || (0, eval)("this") + } catch (e) { + "object" == typeof window && (n = window) + } + e.exports = n + }, function(e, t, n) { + "use strict"; + var r, o = n(91), + i = (r = o) && r.__esModule ? r : { + default: r + }; + e.exports = function() { + var e = { + location: {}, + history: {}, + open: function() {}, + close: function() {}, + File: function() {} + }; + if ("undefined" == typeof window) return e; + try { + e = window; + var t = !0, + n = !1, + r = void 0; + try { + for (var o, a = (0, i.default)(["File", "Blob", "FormData"]); !(t = (o = a.next()).done); t = !0) { + var u = o.value; + u in window && (e[u] = window[u]) + } + } catch (e) { + n = !0, r = e + } finally { + try { + !t && a.return && a.return() + } finally { + if (n) throw r + } + } + } catch (e) { + console.error(e) + } + return e + }() + }, function(e, t) { + var n = e.exports = "undefined" != typeof window && window.Math == Math ? window : "undefined" != typeof self && self.Math == Math ? self : Function("return this")(); + "number" == typeof __g && (__g = n) + }, function(e, t, n) { + "use strict"; + + function r(e) { + return function() { + return e + } + } + var o = function() {}; + o.thatReturns = r, o.thatReturnsFalse = r(!1), o.thatReturnsTrue = r(!0), o.thatReturnsNull = r(null), o.thatReturnsThis = function() { + return this + }, o.thatReturnsArgument = function(e) { + return e + }, e.exports = o + }, function(e, t, n) { + "use strict"; + Object.defineProperty(t, "__esModule", { + value: !0 + }); + var r = i(n(25)); + t.isOAS3 = a, t.isSwagger2 = function(e) { + var t = e.get("swagger"); + if ("string" != typeof t) return !1; + return t.startsWith("2.0") + }, t.OAS3ComponentWrapFactory = function(e) { + return function(t, n) { + return function(i) { + if (n && n.specSelectors && n.specSelectors.specJson) { + var u = n.specSelectors.specJson(); + return a(u) ? o.default.createElement(e, (0, r.default)({}, i, n, { + Ori: t + })) : o.default.createElement(t, i) + } + return console.warn("OAS3 wrapper: couldn't get spec"), null + } + } + }; + var o = i(n(0)); + + function i(e) { + return e && e.__esModule ? 
e : { + default: e + } + } + + function a(e) { + var t = e.get("openapi"); + return "string" == typeof t && (t.startsWith("3.0.") && t.length > 4) + } + }, function(e, t, n) { + var r = n(28); + e.exports = function(e) { + if (!r(e)) throw TypeError(e + " is not an object!"); + return e + } + }, function(e, t, n) { + var r = n(278), + o = "object" == typeof self && self && self.Object === Object && self, + i = r || o || Function("return this")(); + e.exports = i + }, function(e, t) { + e.exports = function(e) { + var t = typeof e; + return null != e && ("object" == t || "function" == t) + } + }, function(e, t, n) { + "use strict"; + var r = null; + e.exports = { + debugTool: r + } + }, function(e, t, n) { + var r = n(36), + o = n(238), + i = n(157), + a = Object.defineProperty; + t.f = n(44) ? Object.defineProperty : function(e, t, n) { + if (r(e), t = i(t, !0), r(n), o) try { + return a(e, t, n) + } catch (e) {} + if ("get" in n || "set" in n) throw TypeError("Accessors not supported!"); + return "value" in n && (e[t] = n.value), e + } + }, function(e, t, n) { + e.exports = { + default: n(516), + __esModule: !0 + } + }, function(e, t, n) { + e.exports = { + default: n(517), + __esModule: !0 + } + }, function(e, t, n) { + "use strict"; + var r = n(11), + o = n(13), + i = n(354), + a = n(69), + u = n(355), + s = n(88), + l = n(147), + c = n(8), + f = [], + p = 0, + d = i.getPooled(), + h = !1, + v = null; + + function m() { + E.ReactReconcileTransaction && v || r("123") + } + var g = [{ + initialize: function() { + this.dirtyComponentsLength = f.length + }, + close: function() { + this.dirtyComponentsLength !== f.length ? (f.splice(0, this.dirtyComponentsLength), w()) : f.length = 0 + } + }, { + initialize: function() { + this.callbackQueue.reset() + }, + close: function() { + this.callbackQueue.notifyAll() + } + }]; + + function y() { + this.reinitializeTransaction(), this.dirtyComponentsLength = null, this.callbackQueue = i.getPooled(), this.reconcileTransaction = E.ReactReconcileTransaction.getPooled(!0) + } + + function b(e, t) { + return e._mountOrder - t._mountOrder + } + + function _(e) { + var t = e.dirtyComponentsLength; + t !== f.length && r("124", t, f.length), f.sort(b), p++; + for (var n = 0; n < t; n++) { + var o, i = f[n], + a = i._pendingCallbacks; + if (i._pendingCallbacks = null, u.logTopLevelRenders) { + var l = i; + i._currentElement.type.isReactTopLevelWrapper && (l = i._renderedComponent), o = "React update: " + l.getName(), console.time(o) + } + if (s.performUpdateIfNecessary(i, e.reconcileTransaction, p), o && console.timeEnd(o), a) + for (var c = 0; c < a.length; c++) e.callbackQueue.enqueue(a[c], i.getPublicInstance()) + } + } + o(y.prototype, l, { + getTransactionWrappers: function() { + return g + }, + destructor: function() { + this.dirtyComponentsLength = null, i.release(this.callbackQueue), this.callbackQueue = null, E.ReactReconcileTransaction.release(this.reconcileTransaction), this.reconcileTransaction = null + }, + perform: function(e, t, n) { + return l.perform.call(this, this.reconcileTransaction.perform, this.reconcileTransaction, e, t, n) + } + }), a.addPoolingTo(y); + var w = function() { + for (; f.length || h;) { + if (f.length) { + var e = y.getPooled(); + e.perform(_, null, e), y.release(e) + } + if (h) { + h = !1; + var t = d; + d = i.getPooled(), t.notifyAll(), i.release(t) + } + } + }; + var E = { + ReactReconcileTransaction: null, + batchedUpdates: function(e, t, n, r, o, i) { + return m(), v.batchedUpdates(e, t, n, r, o, i) + }, + 
enqueueUpdate: function e(t) { + m(), v.isBatchingUpdates ? (f.push(t), null == t._updateBatchNumber && (t._updateBatchNumber = p + 1)) : v.batchedUpdates(e, t) + }, + flushBatchedUpdates: w, + injection: { + injectReconcileTransaction: function(e) { + e || r("126"), E.ReactReconcileTransaction = e + }, + injectBatchingStrategy: function(e) { + e || r("127"), "function" != typeof e.batchedUpdates && r("128"), "boolean" != typeof e.isBatchingUpdates && r("129"), v = e + } + }, + asap: function(e, t) { + c(v.isBatchingUpdates, "ReactUpdates.asap: Can't enqueue an asap callback in a context whereupdates are not being batched."), d.enqueue(e, t), h = !0 + } + }; + e.exports = E + }, function(e, t, n) { + e.exports = !n(51)(function() { + return 7 != Object.defineProperty({}, "a", { + get: function() { + return 7 + } + }).a + }) + }, function(e, t, n) { + "use strict"; + t.__esModule = !0; + var r = a(n(519)), + o = a(n(521)), + i = "function" == typeof o.default && "symbol" == typeof r.default ? function(e) { + return typeof e + } : function(e) { + return e && "function" == typeof o.default && e.constructor === o.default && e !== o.default.prototype ? "symbol" : typeof e + }; + + function a(e) { + return e && e.__esModule ? e : { + default: e + } + } + t.default = "function" == typeof o.default && "symbol" === i(r.default) ? function(e) { + return void 0 === e ? "undefined" : i(e) + } : function(e) { + return e && "function" == typeof o.default && e.constructor === o.default && e !== o.default.prototype ? "symbol" : void 0 === e ? "undefined" : i(e) + } + }, function(e, t, n) { + "use strict"; + e.exports = { + current: null + } + }, function(e, t) { + e.exports = function(e) { + return null != e && "object" == typeof e + } + }, function(e, t, n) { + "use strict"; + var r = n(13), + o = n(69), + i = n(34), + a = (n(9), ["dispatchConfig", "_targetInst", "nativeEvent", "isDefaultPrevented", "isPropagationStopped", "_dispatchListeners", "_dispatchInstances"]), + u = { + type: null, + target: null, + currentTarget: i.thatReturnsNull, + eventPhase: null, + bubbles: null, + cancelable: null, + timeStamp: function(e) { + return e.timeStamp || Date.now() + }, + defaultPrevented: null, + isTrusted: null + }; + + function s(e, t, n, r) { + this.dispatchConfig = e, this._targetInst = t, this.nativeEvent = n; + var o = this.constructor.Interface; + for (var a in o) + if (o.hasOwnProperty(a)) { + 0; + var u = o[a]; + u ? this[a] = u(n) : "target" === a ? this.target = r : this[a] = n[a] + } + var s = null != n.defaultPrevented ? n.defaultPrevented : !1 === n.returnValue; + return this.isDefaultPrevented = s ? i.thatReturnsTrue : i.thatReturnsFalse, this.isPropagationStopped = i.thatReturnsFalse, this + } + r(s.prototype, { + preventDefault: function() { + this.defaultPrevented = !0; + var e = this.nativeEvent; + e && (e.preventDefault ? e.preventDefault() : "unknown" != typeof e.returnValue && (e.returnValue = !1), this.isDefaultPrevented = i.thatReturnsTrue) + }, + stopPropagation: function() { + var e = this.nativeEvent; + e && (e.stopPropagation ? 
e.stopPropagation() : "unknown" != typeof e.cancelBubble && (e.cancelBubble = !0), this.isPropagationStopped = i.thatReturnsTrue) + }, + persist: function() { + this.isPersistent = i.thatReturnsTrue + }, + isPersistent: i.thatReturnsFalse, + destructor: function() { + var e = this.constructor.Interface; + for (var t in e) this[t] = null; + for (var n = 0; n < a.length; n++) this[a[n]] = null + } + }), s.Interface = u, s.augmentClass = function(e, t) { + var n = function() {}; + n.prototype = this.prototype; + var i = new n; + r(i, e.prototype), e.prototype = i, e.prototype.constructor = e, e.Interface = r({}, this.Interface, t), e.augmentClass = this.augmentClass, o.addPoolingTo(e, o.fourArgumentPooler) + }, o.addPoolingTo(s, o.fourArgumentPooler), e.exports = s + }, function(e, t, n) { + var r = n(94); + e.exports = function(e, t, n) { + if (r(e), void 0 === t) return e; + switch (n) { + case 1: + return function(n) { + return e.call(t, n) + }; + case 2: + return function(n, r) { + return e.call(t, n, r) + }; + case 3: + return function(n, r, o) { + return e.call(t, n, r, o) + } + } + return function() { + return e.apply(t, arguments) + } + } + }, function(e, t, n) { + var r = n(40), + o = n(95); + e.exports = n(44) ? function(e, t, n) { + return r.f(e, t, o(1, n)) + } : function(e, t, n) { + return e[t] = n, e + } + }, function(e, t) { + e.exports = function(e) { + try { + return !!e() + } catch (e) { + return !0 + } + } + }, function(e, t) { + var n = {}.hasOwnProperty; + e.exports = function(e, t) { + return n.call(e, t) + } + }, function(e, t) { + e.exports = function(e) { + if (void 0 == e) throw TypeError("Can't call method on " + e); + return e + } + }, function(e, t, n) { + "use strict"; + (function(e) { + /*! + * The buffer module from node.js, for the browser. + * + * @author Feross Aboukhadijeh + * @license MIT + */ + var r = n(528), + o = n(529), + i = n(261); + + function a() { + return s.TYPED_ARRAY_SUPPORT ? 2147483647 : 1073741823 + } + + function u(e, t) { + if (a() < t) throw new RangeError("Invalid typed array length"); + return s.TYPED_ARRAY_SUPPORT ? (e = new Uint8Array(t)).__proto__ = s.prototype : (null === e && (e = new s(t)), e.length = t), e + } + + function s(e, t, n) { + if (!(s.TYPED_ARRAY_SUPPORT || this instanceof s)) return new s(e, t, n); + if ("number" == typeof e) { + if ("string" == typeof t) throw new Error("If encoding is specified then the first argument must be a string"); + return f(this, e) + } + return l(this, e, t, n) + } + + function l(e, t, n, r) { + if ("number" == typeof t) throw new TypeError('"value" argument must not be a number'); + return "undefined" != typeof ArrayBuffer && t instanceof ArrayBuffer ? function(e, t, n, r) { + if (t.byteLength, n < 0 || t.byteLength < n) throw new RangeError("'offset' is out of bounds"); + if (t.byteLength < n + (r || 0)) throw new RangeError("'length' is out of bounds"); + t = void 0 === n && void 0 === r ? new Uint8Array(t) : void 0 === r ? new Uint8Array(t, n) : new Uint8Array(t, n, r); + s.TYPED_ARRAY_SUPPORT ? (e = t).__proto__ = s.prototype : e = p(e, t); + return e + }(e, t, n, r) : "string" == typeof t ? function(e, t, n) { + "string" == typeof n && "" !== n || (n = "utf8"); + if (!s.isEncoding(n)) throw new TypeError('"encoding" must be a valid string encoding'); + var r = 0 | h(t, n), + o = (e = u(e, r)).write(t, n); + o !== r && (e = e.slice(0, o)); + return e + }(e, t, n) : function(e, t) { + if (s.isBuffer(t)) { + var n = 0 | d(t.length); + return 0 === (e = u(e, n)).length ? 
e : (t.copy(e, 0, 0, n), e) + } + if (t) { + if ("undefined" != typeof ArrayBuffer && t.buffer instanceof ArrayBuffer || "length" in t) return "number" != typeof t.length || (r = t.length) != r ? u(e, 0) : p(e, t); + if ("Buffer" === t.type && i(t.data)) return p(e, t.data) + } + var r; + throw new TypeError("First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.") + }(e, t) + } + + function c(e) { + if ("number" != typeof e) throw new TypeError('"size" argument must be a number'); + if (e < 0) throw new RangeError('"size" argument must not be negative') + } + + function f(e, t) { + if (c(t), e = u(e, t < 0 ? 0 : 0 | d(t)), !s.TYPED_ARRAY_SUPPORT) + for (var n = 0; n < t; ++n) e[n] = 0; + return e + } + + function p(e, t) { + var n = t.length < 0 ? 0 : 0 | d(t.length); + e = u(e, n); + for (var r = 0; r < n; r += 1) e[r] = 255 & t[r]; + return e + } + + function d(e) { + if (e >= a()) throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x" + a().toString(16) + " bytes"); + return 0 | e + } + + function h(e, t) { + if (s.isBuffer(e)) return e.length; + if ("undefined" != typeof ArrayBuffer && "function" == typeof ArrayBuffer.isView && (ArrayBuffer.isView(e) || e instanceof ArrayBuffer)) return e.byteLength; + "string" != typeof e && (e = "" + e); + var n = e.length; + if (0 === n) return 0; + for (var r = !1;;) switch (t) { + case "ascii": + case "latin1": + case "binary": + return n; + case "utf8": + case "utf-8": + case void 0: + return F(e).length; + case "ucs2": + case "ucs-2": + case "utf16le": + case "utf-16le": + return 2 * n; + case "hex": + return n >>> 1; + case "base64": + return z(e).length; + default: + if (r) return F(e).length; + t = ("" + t).toLowerCase(), r = !0 + } + } + + function v(e, t, n) { + var r = e[t]; + e[t] = e[n], e[n] = r + } + + function m(e, t, n, r, o) { + if (0 === e.length) return -1; + if ("string" == typeof n ? (r = n, n = 0) : n > 2147483647 ? n = 2147483647 : n < -2147483648 && (n = -2147483648), n = +n, isNaN(n) && (n = o ? 0 : e.length - 1), n < 0 && (n = e.length + n), n >= e.length) { + if (o) return -1; + n = e.length - 1 + } else if (n < 0) { + if (!o) return -1; + n = 0 + } + if ("string" == typeof t && (t = s.from(t, r)), s.isBuffer(t)) return 0 === t.length ? -1 : g(e, t, n, r, o); + if ("number" == typeof t) return t &= 255, s.TYPED_ARRAY_SUPPORT && "function" == typeof Uint8Array.prototype.indexOf ? o ? Uint8Array.prototype.indexOf.call(e, t, n) : Uint8Array.prototype.lastIndexOf.call(e, t, n) : g(e, [t], n, r, o); + throw new TypeError("val must be string, number or Buffer") + } + + function g(e, t, n, r, o) { + var i, a = 1, + u = e.length, + s = t.length; + if (void 0 !== r && ("ucs2" === (r = String(r).toLowerCase()) || "ucs-2" === r || "utf16le" === r || "utf-16le" === r)) { + if (e.length < 2 || t.length < 2) return -1; + a = 2, u /= 2, s /= 2, n /= 2 + } + + function l(e, t) { + return 1 === a ? e[t] : e.readUInt16BE(t * a) + } + if (o) { + var c = -1; + for (i = n; i < u; i++) + if (l(e, i) === l(t, -1 === c ? 0 : i - c)) { + if (-1 === c && (c = i), i - c + 1 === s) return c * a + } else -1 !== c && (i -= i - c), c = -1 + } else + for (n + s > u && (n = u - s), i = n; i >= 0; i--) { + for (var f = !0, p = 0; p < s; p++) + if (l(e, i + p) !== l(t, p)) { + f = !1; + break + } + if (f) return i + } + return -1 + } + + function y(e, t, n, r) { + n = Number(n) || 0; + var o = e.length - n; + r ? 
(r = Number(r)) > o && (r = o) : r = o; + var i = t.length; + if (i % 2 != 0) throw new TypeError("Invalid hex string"); + r > i / 2 && (r = i / 2); + for (var a = 0; a < r; ++a) { + var u = parseInt(t.substr(2 * a, 2), 16); + if (isNaN(u)) return a; + e[n + a] = u + } + return a + } + + function b(e, t, n, r) { + return B(F(t, e.length - n), e, n, r) + } + + function _(e, t, n, r) { + return B(function(e) { + for (var t = [], n = 0; n < e.length; ++n) t.push(255 & e.charCodeAt(n)); + return t + }(t), e, n, r) + } + + function w(e, t, n, r) { + return _(e, t, n, r) + } + + function E(e, t, n, r) { + return B(z(t), e, n, r) + } + + function x(e, t, n, r) { + return B(function(e, t) { + for (var n, r, o, i = [], a = 0; a < e.length && !((t -= 2) < 0); ++a) n = e.charCodeAt(a), r = n >> 8, o = n % 256, i.push(o), i.push(r); + return i + }(t, e.length - n), e, n, r) + } + + function S(e, t, n) { + return 0 === t && n === e.length ? r.fromByteArray(e) : r.fromByteArray(e.slice(t, n)) + } + + function C(e, t, n) { + n = Math.min(e.length, n); + for (var r = [], o = t; o < n;) { + var i, a, u, s, l = e[o], + c = null, + f = l > 239 ? 4 : l > 223 ? 3 : l > 191 ? 2 : 1; + if (o + f <= n) switch (f) { + case 1: + l < 128 && (c = l); + break; + case 2: + 128 == (192 & (i = e[o + 1])) && (s = (31 & l) << 6 | 63 & i) > 127 && (c = s); + break; + case 3: + i = e[o + 1], a = e[o + 2], 128 == (192 & i) && 128 == (192 & a) && (s = (15 & l) << 12 | (63 & i) << 6 | 63 & a) > 2047 && (s < 55296 || s > 57343) && (c = s); + break; + case 4: + i = e[o + 1], a = e[o + 2], u = e[o + 3], 128 == (192 & i) && 128 == (192 & a) && 128 == (192 & u) && (s = (15 & l) << 18 | (63 & i) << 12 | (63 & a) << 6 | 63 & u) > 65535 && s < 1114112 && (c = s) + } + null === c ? (c = 65533, f = 1) : c > 65535 && (c -= 65536, r.push(c >>> 10 & 1023 | 55296), c = 56320 | 1023 & c), r.push(c), o += f + } + return function(e) { + var t = e.length; + if (t <= k) return String.fromCharCode.apply(String, e); + var n = "", + r = 0; + for (; r < t;) n += String.fromCharCode.apply(String, e.slice(r, r += k)); + return n + }(r) + } + t.Buffer = s, t.SlowBuffer = function(e) { + +e != e && (e = 0); + return s.alloc(+e) + }, t.INSPECT_MAX_BYTES = 50, s.TYPED_ARRAY_SUPPORT = void 0 !== e.TYPED_ARRAY_SUPPORT ? e.TYPED_ARRAY_SUPPORT : function() { + try { + var e = new Uint8Array(1); + return e.__proto__ = { + __proto__: Uint8Array.prototype, + foo: function() { + return 42 + } + }, 42 === e.foo() && "function" == typeof e.subarray && 0 === e.subarray(1, 1).byteLength + } catch (e) { + return !1 + } + }(), t.kMaxLength = a(), s.poolSize = 8192, s._augment = function(e) { + return e.__proto__ = s.prototype, e + }, s.from = function(e, t, n) { + return l(null, e, t, n) + }, s.TYPED_ARRAY_SUPPORT && (s.prototype.__proto__ = Uint8Array.prototype, s.__proto__ = Uint8Array, "undefined" != typeof Symbol && Symbol.species && s[Symbol.species] === s && Object.defineProperty(s, Symbol.species, { + value: null, + configurable: !0 + })), s.alloc = function(e, t, n) { + return function(e, t, n, r) { + return c(t), t <= 0 ? u(e, t) : void 0 !== n ? "string" == typeof r ? 
u(e, t).fill(n, r) : u(e, t).fill(n) : u(e, t) + }(null, e, t, n) + }, s.allocUnsafe = function(e) { + return f(null, e) + }, s.allocUnsafeSlow = function(e) { + return f(null, e) + }, s.isBuffer = function(e) { + return !(null == e || !e._isBuffer) + }, s.compare = function(e, t) { + if (!s.isBuffer(e) || !s.isBuffer(t)) throw new TypeError("Arguments must be Buffers"); + if (e === t) return 0; + for (var n = e.length, r = t.length, o = 0, i = Math.min(n, r); o < i; ++o) + if (e[o] !== t[o]) { + n = e[o], r = t[o]; + break + } + return n < r ? -1 : r < n ? 1 : 0 + }, s.isEncoding = function(e) { + switch (String(e).toLowerCase()) { + case "hex": + case "utf8": + case "utf-8": + case "ascii": + case "latin1": + case "binary": + case "base64": + case "ucs2": + case "ucs-2": + case "utf16le": + case "utf-16le": + return !0; + default: + return !1 + } + }, s.concat = function(e, t) { + if (!i(e)) throw new TypeError('"list" argument must be an Array of Buffers'); + if (0 === e.length) return s.alloc(0); + var n; + if (void 0 === t) + for (t = 0, n = 0; n < e.length; ++n) t += e[n].length; + var r = s.allocUnsafe(t), + o = 0; + for (n = 0; n < e.length; ++n) { + var a = e[n]; + if (!s.isBuffer(a)) throw new TypeError('"list" argument must be an Array of Buffers'); + a.copy(r, o), o += a.length + } + return r + }, s.byteLength = h, s.prototype._isBuffer = !0, s.prototype.swap16 = function() { + var e = this.length; + if (e % 2 != 0) throw new RangeError("Buffer size must be a multiple of 16-bits"); + for (var t = 0; t < e; t += 2) v(this, t, t + 1); + return this + }, s.prototype.swap32 = function() { + var e = this.length; + if (e % 4 != 0) throw new RangeError("Buffer size must be a multiple of 32-bits"); + for (var t = 0; t < e; t += 4) v(this, t, t + 3), v(this, t + 1, t + 2); + return this + }, s.prototype.swap64 = function() { + var e = this.length; + if (e % 8 != 0) throw new RangeError("Buffer size must be a multiple of 64-bits"); + for (var t = 0; t < e; t += 8) v(this, t, t + 7), v(this, t + 1, t + 6), v(this, t + 2, t + 5), v(this, t + 3, t + 4); + return this + }, s.prototype.toString = function() { + var e = 0 | this.length; + return 0 === e ? "" : 0 === arguments.length ? C(this, 0, e) : function(e, t, n) { + var r = !1; + if ((void 0 === t || t < 0) && (t = 0), t > this.length) return ""; + if ((void 0 === n || n > this.length) && (n = this.length), n <= 0) return ""; + if ((n >>>= 0) <= (t >>>= 0)) return ""; + for (e || (e = "utf8");;) switch (e) { + case "hex": + return P(this, t, n); + case "utf8": + case "utf-8": + return C(this, t, n); + case "ascii": + return A(this, t, n); + case "latin1": + case "binary": + return O(this, t, n); + case "base64": + return S(this, t, n); + case "ucs2": + case "ucs-2": + case "utf16le": + case "utf-16le": + return T(this, t, n); + default: + if (r) throw new TypeError("Unknown encoding: " + e); + e = (e + "").toLowerCase(), r = !0 + } + }.apply(this, arguments) + }, s.prototype.equals = function(e) { + if (!s.isBuffer(e)) throw new TypeError("Argument must be a Buffer"); + return this === e || 0 === s.compare(this, e) + }, s.prototype.inspect = function() { + var e = "", + n = t.INSPECT_MAX_BYTES; + return this.length > 0 && (e = this.toString("hex", 0, n).match(/.{2}/g).join(" "), this.length > n && (e += " ... ")), "" + }, s.prototype.compare = function(e, t, n, r, o) { + if (!s.isBuffer(e)) throw new TypeError("Argument must be a Buffer"); + if (void 0 === t && (t = 0), void 0 === n && (n = e ? 
e.length : 0), void 0 === r && (r = 0), void 0 === o && (o = this.length), t < 0 || n > e.length || r < 0 || o > this.length) throw new RangeError("out of range index"); + if (r >= o && t >= n) return 0; + if (r >= o) return -1; + if (t >= n) return 1; + if (t >>>= 0, n >>>= 0, r >>>= 0, o >>>= 0, this === e) return 0; + for (var i = o - r, a = n - t, u = Math.min(i, a), l = this.slice(r, o), c = e.slice(t, n), f = 0; f < u; ++f) + if (l[f] !== c[f]) { + i = l[f], a = c[f]; + break + } + return i < a ? -1 : a < i ? 1 : 0 + }, s.prototype.includes = function(e, t, n) { + return -1 !== this.indexOf(e, t, n) + }, s.prototype.indexOf = function(e, t, n) { + return m(this, e, t, n, !0) + }, s.prototype.lastIndexOf = function(e, t, n) { + return m(this, e, t, n, !1) + }, s.prototype.write = function(e, t, n, r) { + if (void 0 === t) r = "utf8", n = this.length, t = 0; + else if (void 0 === n && "string" == typeof t) r = t, n = this.length, t = 0; + else { + if (!isFinite(t)) throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported"); + t |= 0, isFinite(n) ? (n |= 0, void 0 === r && (r = "utf8")) : (r = n, n = void 0) + } + var o = this.length - t; + if ((void 0 === n || n > o) && (n = o), e.length > 0 && (n < 0 || t < 0) || t > this.length) throw new RangeError("Attempt to write outside buffer bounds"); + r || (r = "utf8"); + for (var i = !1;;) switch (r) { + case "hex": + return y(this, e, t, n); + case "utf8": + case "utf-8": + return b(this, e, t, n); + case "ascii": + return _(this, e, t, n); + case "latin1": + case "binary": + return w(this, e, t, n); + case "base64": + return E(this, e, t, n); + case "ucs2": + case "ucs-2": + case "utf16le": + case "utf-16le": + return x(this, e, t, n); + default: + if (i) throw new TypeError("Unknown encoding: " + r); + r = ("" + r).toLowerCase(), i = !0 + } + }, s.prototype.toJSON = function() { + return { + type: "Buffer", + data: Array.prototype.slice.call(this._arr || this, 0) + } + }; + var k = 4096; + + function A(e, t, n) { + var r = ""; + n = Math.min(e.length, n); + for (var o = t; o < n; ++o) r += String.fromCharCode(127 & e[o]); + return r + } + + function O(e, t, n) { + var r = ""; + n = Math.min(e.length, n); + for (var o = t; o < n; ++o) r += String.fromCharCode(e[o]); + return r + } + + function P(e, t, n) { + var r = e.length; + (!t || t < 0) && (t = 0), (!n || n < 0 || n > r) && (n = r); + for (var o = "", i = t; i < n; ++i) o += q(e[i]); + return o + } + + function T(e, t, n) { + for (var r = e.slice(t, n), o = "", i = 0; i < r.length; i += 2) o += String.fromCharCode(r[i] + 256 * r[i + 1]); + return o + } + + function M(e, t, n) { + if (e % 1 != 0 || e < 0) throw new RangeError("offset is not uint"); + if (e + t > n) throw new RangeError("Trying to access beyond buffer length") + } + + function I(e, t, n, r, o, i) { + if (!s.isBuffer(e)) throw new TypeError('"buffer" argument must be a Buffer instance'); + if (t > o || t < i) throw new RangeError('"value" argument is out of bounds'); + if (n + r > e.length) throw new RangeError("Index out of range") + } + + function j(e, t, n, r) { + t < 0 && (t = 65535 + t + 1); + for (var o = 0, i = Math.min(e.length - n, 2); o < i; ++o) e[n + o] = (t & 255 << 8 * (r ? o : 1 - o)) >>> 8 * (r ? o : 1 - o) + } + + function N(e, t, n, r) { + t < 0 && (t = 4294967295 + t + 1); + for (var o = 0, i = Math.min(e.length - n, 4); o < i; ++o) e[n + o] = t >>> 8 * (r ? 
o : 3 - o) & 255 + } + + function R(e, t, n, r, o, i) { + if (n + r > e.length) throw new RangeError("Index out of range"); + if (n < 0) throw new RangeError("Index out of range") + } + + function D(e, t, n, r, i) { + return i || R(e, 0, n, 4), o.write(e, t, n, r, 23, 4), n + 4 + } + + function L(e, t, n, r, i) { + return i || R(e, 0, n, 8), o.write(e, t, n, r, 52, 8), n + 8 + } + s.prototype.slice = function(e, t) { + var n, r = this.length; + if (e = ~~e, t = void 0 === t ? r : ~~t, e < 0 ? (e += r) < 0 && (e = 0) : e > r && (e = r), t < 0 ? (t += r) < 0 && (t = 0) : t > r && (t = r), t < e && (t = e), s.TYPED_ARRAY_SUPPORT)(n = this.subarray(e, t)).__proto__ = s.prototype; + else { + var o = t - e; + n = new s(o, void 0); + for (var i = 0; i < o; ++i) n[i] = this[i + e] + } + return n + }, s.prototype.readUIntLE = function(e, t, n) { + e |= 0, t |= 0, n || M(e, t, this.length); + for (var r = this[e], o = 1, i = 0; ++i < t && (o *= 256);) r += this[e + i] * o; + return r + }, s.prototype.readUIntBE = function(e, t, n) { + e |= 0, t |= 0, n || M(e, t, this.length); + for (var r = this[e + --t], o = 1; t > 0 && (o *= 256);) r += this[e + --t] * o; + return r + }, s.prototype.readUInt8 = function(e, t) { + return t || M(e, 1, this.length), this[e] + }, s.prototype.readUInt16LE = function(e, t) { + return t || M(e, 2, this.length), this[e] | this[e + 1] << 8 + }, s.prototype.readUInt16BE = function(e, t) { + return t || M(e, 2, this.length), this[e] << 8 | this[e + 1] + }, s.prototype.readUInt32LE = function(e, t) { + return t || M(e, 4, this.length), (this[e] | this[e + 1] << 8 | this[e + 2] << 16) + 16777216 * this[e + 3] + }, s.prototype.readUInt32BE = function(e, t) { + return t || M(e, 4, this.length), 16777216 * this[e] + (this[e + 1] << 16 | this[e + 2] << 8 | this[e + 3]) + }, s.prototype.readIntLE = function(e, t, n) { + e |= 0, t |= 0, n || M(e, t, this.length); + for (var r = this[e], o = 1, i = 0; ++i < t && (o *= 256);) r += this[e + i] * o; + return r >= (o *= 128) && (r -= Math.pow(2, 8 * t)), r + }, s.prototype.readIntBE = function(e, t, n) { + e |= 0, t |= 0, n || M(e, t, this.length); + for (var r = t, o = 1, i = this[e + --r]; r > 0 && (o *= 256);) i += this[e + --r] * o; + return i >= (o *= 128) && (i -= Math.pow(2, 8 * t)), i + }, s.prototype.readInt8 = function(e, t) { + return t || M(e, 1, this.length), 128 & this[e] ? -1 * (255 - this[e] + 1) : this[e] + }, s.prototype.readInt16LE = function(e, t) { + t || M(e, 2, this.length); + var n = this[e] | this[e + 1] << 8; + return 32768 & n ? 4294901760 | n : n + }, s.prototype.readInt16BE = function(e, t) { + t || M(e, 2, this.length); + var n = this[e + 1] | this[e] << 8; + return 32768 & n ? 
4294901760 | n : n + }, s.prototype.readInt32LE = function(e, t) { + return t || M(e, 4, this.length), this[e] | this[e + 1] << 8 | this[e + 2] << 16 | this[e + 3] << 24 + }, s.prototype.readInt32BE = function(e, t) { + return t || M(e, 4, this.length), this[e] << 24 | this[e + 1] << 16 | this[e + 2] << 8 | this[e + 3] + }, s.prototype.readFloatLE = function(e, t) { + return t || M(e, 4, this.length), o.read(this, e, !0, 23, 4) + }, s.prototype.readFloatBE = function(e, t) { + return t || M(e, 4, this.length), o.read(this, e, !1, 23, 4) + }, s.prototype.readDoubleLE = function(e, t) { + return t || M(e, 8, this.length), o.read(this, e, !0, 52, 8) + }, s.prototype.readDoubleBE = function(e, t) { + return t || M(e, 8, this.length), o.read(this, e, !1, 52, 8) + }, s.prototype.writeUIntLE = function(e, t, n, r) { + (e = +e, t |= 0, n |= 0, r) || I(this, e, t, n, Math.pow(2, 8 * n) - 1, 0); + var o = 1, + i = 0; + for (this[t] = 255 & e; ++i < n && (o *= 256);) this[t + i] = e / o & 255; + return t + n + }, s.prototype.writeUIntBE = function(e, t, n, r) { + (e = +e, t |= 0, n |= 0, r) || I(this, e, t, n, Math.pow(2, 8 * n) - 1, 0); + var o = n - 1, + i = 1; + for (this[t + o] = 255 & e; --o >= 0 && (i *= 256);) this[t + o] = e / i & 255; + return t + n + }, s.prototype.writeUInt8 = function(e, t, n) { + return e = +e, t |= 0, n || I(this, e, t, 1, 255, 0), s.TYPED_ARRAY_SUPPORT || (e = Math.floor(e)), this[t] = 255 & e, t + 1 + }, s.prototype.writeUInt16LE = function(e, t, n) { + return e = +e, t |= 0, n || I(this, e, t, 2, 65535, 0), s.TYPED_ARRAY_SUPPORT ? (this[t] = 255 & e, this[t + 1] = e >>> 8) : j(this, e, t, !0), t + 2 + }, s.prototype.writeUInt16BE = function(e, t, n) { + return e = +e, t |= 0, n || I(this, e, t, 2, 65535, 0), s.TYPED_ARRAY_SUPPORT ? (this[t] = e >>> 8, this[t + 1] = 255 & e) : j(this, e, t, !1), t + 2 + }, s.prototype.writeUInt32LE = function(e, t, n) { + return e = +e, t |= 0, n || I(this, e, t, 4, 4294967295, 0), s.TYPED_ARRAY_SUPPORT ? (this[t + 3] = e >>> 24, this[t + 2] = e >>> 16, this[t + 1] = e >>> 8, this[t] = 255 & e) : N(this, e, t, !0), t + 4 + }, s.prototype.writeUInt32BE = function(e, t, n) { + return e = +e, t |= 0, n || I(this, e, t, 4, 4294967295, 0), s.TYPED_ARRAY_SUPPORT ? (this[t] = e >>> 24, this[t + 1] = e >>> 16, this[t + 2] = e >>> 8, this[t + 3] = 255 & e) : N(this, e, t, !1), t + 4 + }, s.prototype.writeIntLE = function(e, t, n, r) { + if (e = +e, t |= 0, !r) { + var o = Math.pow(2, 8 * n - 1); + I(this, e, t, n, o - 1, -o) + } + var i = 0, + a = 1, + u = 0; + for (this[t] = 255 & e; ++i < n && (a *= 256);) e < 0 && 0 === u && 0 !== this[t + i - 1] && (u = 1), this[t + i] = (e / a >> 0) - u & 255; + return t + n + }, s.prototype.writeIntBE = function(e, t, n, r) { + if (e = +e, t |= 0, !r) { + var o = Math.pow(2, 8 * n - 1); + I(this, e, t, n, o - 1, -o) + } + var i = n - 1, + a = 1, + u = 0; + for (this[t + i] = 255 & e; --i >= 0 && (a *= 256);) e < 0 && 0 === u && 0 !== this[t + i + 1] && (u = 1), this[t + i] = (e / a >> 0) - u & 255; + return t + n + }, s.prototype.writeInt8 = function(e, t, n) { + return e = +e, t |= 0, n || I(this, e, t, 1, 127, -128), s.TYPED_ARRAY_SUPPORT || (e = Math.floor(e)), e < 0 && (e = 255 + e + 1), this[t] = 255 & e, t + 1 + }, s.prototype.writeInt16LE = function(e, t, n) { + return e = +e, t |= 0, n || I(this, e, t, 2, 32767, -32768), s.TYPED_ARRAY_SUPPORT ? 
(this[t] = 255 & e, this[t + 1] = e >>> 8) : j(this, e, t, !0), t + 2 + }, s.prototype.writeInt16BE = function(e, t, n) { + return e = +e, t |= 0, n || I(this, e, t, 2, 32767, -32768), s.TYPED_ARRAY_SUPPORT ? (this[t] = e >>> 8, this[t + 1] = 255 & e) : j(this, e, t, !1), t + 2 + }, s.prototype.writeInt32LE = function(e, t, n) { + return e = +e, t |= 0, n || I(this, e, t, 4, 2147483647, -2147483648), s.TYPED_ARRAY_SUPPORT ? (this[t] = 255 & e, this[t + 1] = e >>> 8, this[t + 2] = e >>> 16, this[t + 3] = e >>> 24) : N(this, e, t, !0), t + 4 + }, s.prototype.writeInt32BE = function(e, t, n) { + return e = +e, t |= 0, n || I(this, e, t, 4, 2147483647, -2147483648), e < 0 && (e = 4294967295 + e + 1), s.TYPED_ARRAY_SUPPORT ? (this[t] = e >>> 24, this[t + 1] = e >>> 16, this[t + 2] = e >>> 8, this[t + 3] = 255 & e) : N(this, e, t, !1), t + 4 + }, s.prototype.writeFloatLE = function(e, t, n) { + return D(this, e, t, !0, n) + }, s.prototype.writeFloatBE = function(e, t, n) { + return D(this, e, t, !1, n) + }, s.prototype.writeDoubleLE = function(e, t, n) { + return L(this, e, t, !0, n) + }, s.prototype.writeDoubleBE = function(e, t, n) { + return L(this, e, t, !1, n) + }, s.prototype.copy = function(e, t, n, r) { + if (n || (n = 0), r || 0 === r || (r = this.length), t >= e.length && (t = e.length), t || (t = 0), r > 0 && r < n && (r = n), r === n) return 0; + if (0 === e.length || 0 === this.length) return 0; + if (t < 0) throw new RangeError("targetStart out of bounds"); + if (n < 0 || n >= this.length) throw new RangeError("sourceStart out of bounds"); + if (r < 0) throw new RangeError("sourceEnd out of bounds"); + r > this.length && (r = this.length), e.length - t < r - n && (r = e.length - t + n); + var o, i = r - n; + if (this === e && n < t && t < r) + for (o = i - 1; o >= 0; --o) e[o + t] = this[o + n]; + else if (i < 1e3 || !s.TYPED_ARRAY_SUPPORT) + for (o = 0; o < i; ++o) e[o + t] = this[o + n]; + else Uint8Array.prototype.set.call(e, this.subarray(n, n + i), t); + return i + }, s.prototype.fill = function(e, t, n, r) { + if ("string" == typeof e) { + if ("string" == typeof t ? (r = t, t = 0, n = this.length) : "string" == typeof n && (r = n, n = this.length), 1 === e.length) { + var o = e.charCodeAt(0); + o < 256 && (e = o) + } + if (void 0 !== r && "string" != typeof r) throw new TypeError("encoding must be a string"); + if ("string" == typeof r && !s.isEncoding(r)) throw new TypeError("Unknown encoding: " + r) + } else "number" == typeof e && (e &= 255); + if (t < 0 || this.length < t || this.length < n) throw new RangeError("Out of range index"); + if (n <= t) return this; + var i; + if (t >>>= 0, n = void 0 === n ? this.length : n >>> 0, e || (e = 0), "number" == typeof e) + for (i = t; i < n; ++i) this[i] = e; + else { + var a = s.isBuffer(e) ? e : F(new s(e, r).toString()), + u = a.length; + for (i = 0; i < n - t; ++i) this[i + t] = a[i % u] + } + return this + }; + var U = /[^+\/0-9A-Za-z-_]/g; + + function q(e) { + return e < 16 ? 
"0" + e.toString(16) : e.toString(16) + } + + function F(e, t) { + var n; + t = t || 1 / 0; + for (var r = e.length, o = null, i = [], a = 0; a < r; ++a) { + if ((n = e.charCodeAt(a)) > 55295 && n < 57344) { + if (!o) { + if (n > 56319) { + (t -= 3) > -1 && i.push(239, 191, 189); + continue + } + if (a + 1 === r) { + (t -= 3) > -1 && i.push(239, 191, 189); + continue + } + o = n; + continue + } + if (n < 56320) { + (t -= 3) > -1 && i.push(239, 191, 189), o = n; + continue + } + n = 65536 + (o - 55296 << 10 | n - 56320) + } else o && (t -= 3) > -1 && i.push(239, 191, 189); + if (o = null, n < 128) { + if ((t -= 1) < 0) break; + i.push(n) + } else if (n < 2048) { + if ((t -= 2) < 0) break; + i.push(n >> 6 | 192, 63 & n | 128) + } else if (n < 65536) { + if ((t -= 3) < 0) break; + i.push(n >> 12 | 224, n >> 6 & 63 | 128, 63 & n | 128) + } else { + if (!(n < 1114112)) throw new Error("Invalid code point"); + if ((t -= 4) < 0) break; + i.push(n >> 18 | 240, n >> 12 & 63 | 128, n >> 6 & 63 | 128, 63 & n | 128) + } + } + return i + } + + function z(e) { + return r.toByteArray(function(e) { + if ((e = function(e) { + return e.trim ? e.trim() : e.replace(/^\s+|\s+$/g, "") + }(e).replace(U, "")).length < 2) return ""; + for (; e.length % 4 != 0;) e += "="; + return e + }(e)) + } + + function B(e, t, n, r) { + for (var o = 0; o < r && !(o + n >= t.length || o >= e.length); ++o) t[o + n] = e[o]; + return o + } + }).call(t, n(31)) + }, function(e, t) { + var n, r, o = e.exports = {}; + + function i() { + throw new Error("setTimeout has not been defined") + } + + function a() { + throw new Error("clearTimeout has not been defined") + } + + function u(e) { + if (n === setTimeout) return setTimeout(e, 0); + if ((n === i || !n) && setTimeout) return n = setTimeout, setTimeout(e, 0); + try { + return n(e, 0) + } catch (t) { + try { + return n.call(null, e, 0) + } catch (t) { + return n.call(this, e, 0) + } + } + }! function() { + try { + n = "function" == typeof setTimeout ? setTimeout : i + } catch (e) { + n = i + } + try { + r = "function" == typeof clearTimeout ? clearTimeout : a + } catch (e) { + r = a + } + }(); + var s, l = [], + c = !1, + f = -1; + + function p() { + c && s && (c = !1, s.length ? 
l = s.concat(l) : f = -1, l.length && d()) + } + + function d() { + if (!c) { + var e = u(p); + c = !0; + for (var t = l.length; t;) { + for (s = l, l = []; ++f < t;) s && s[f].run(); + f = -1, t = l.length + } + s = null, c = !1, + function(e) { + if (r === clearTimeout) return clearTimeout(e); + if ((r === a || !r) && clearTimeout) return r = clearTimeout, clearTimeout(e); + try { + r(e) + } catch (t) { + try { + return r.call(null, e) + } catch (t) { + return r.call(this, e) + } + } + }(e) + } + } + + function h(e, t) { + this.fun = e, this.array = t + } + + function v() {} + o.nextTick = function(e) { + var t = new Array(arguments.length - 1); + if (arguments.length > 1) + for (var n = 1; n < arguments.length; n++) t[n - 1] = arguments[n]; + l.push(new h(e, t)), 1 !== l.length || c || u(d) + }, h.prototype.run = function() { + this.fun.apply(null, this.array) + }, o.title = "browser", o.browser = !0, o.env = {}, o.argv = [], o.version = "", o.versions = {}, o.on = v, o.addListener = v, o.once = v, o.off = v, o.removeListener = v, o.removeAllListeners = v, o.emit = v, o.prependListener = v, o.prependOnceListener = v, o.listeners = function(e) { + return [] + }, o.binding = function(e) { + throw new Error("process.binding is not supported") + }, o.cwd = function() { + return "/" + }, o.chdir = function(e) { + throw new Error("process.chdir is not supported") + }, o.umask = function() { + return 0 + } + }, function(e, t, n) { + "use strict"; + e.exports = function(e) { + if ("function" != typeof e) throw new TypeError(e + " is not a function"); + return e + } + }, function(e, t, n) { + "use strict"; + + function r(e, t) { + return e === t + } + + function o(e) { + var t = arguments.length <= 1 || void 0 === arguments[1] ? r : arguments[1], + n = null, + o = null; + return function() { + for (var r = arguments.length, i = Array(r), a = 0; a < r; a++) i[a] = arguments[a]; + return null !== n && n.length === i.length && i.every(function(e, r) { + return t(e, n[r]) + }) || (o = e.apply(void 0, i)), n = i, o + } + } + + function i(e) { + for (var t = arguments.length, n = Array(t > 1 ? t - 1 : 0), r = 1; r < t; r++) n[r - 1] = arguments[r]; + return function() { + for (var t = arguments.length, r = Array(t), o = 0; o < t; o++) r[o] = arguments[o]; + var i = 0, + a = r.pop(), + u = function(e) { + var t = Array.isArray(e[0]) ? e[0] : e; + if (!t.every(function(e) { + return "function" == typeof e + })) { + var n = t.map(function(e) { + return typeof e + }).join(", "); + throw new Error("Selector creators expect all input-selectors to be functions, instead received the following types: [" + n + "]") + } + return t + }(r), + s = e.apply(void 0, [function() { + return i++, a.apply(void 0, arguments) + }].concat(n)), + l = function(e, t) { + for (var n = arguments.length, r = Array(n > 2 ? n - 2 : 0), o = 2; o < n; o++) r[o - 2] = arguments[o]; + var i = u.map(function(n) { + return n.apply(void 0, [e, t].concat(r)) + }); + return s.apply(void 0, function(e) { + if (Array.isArray(e)) { + for (var t = 0, n = Array(e.length); t < e.length; t++) n[t] = e[t]; + return n + } + return Array.from(e) + }(i)) + }; + return l.resultFunc = a, l.recomputations = function() { + return i + }, l.resetRecomputations = function() { + return i = 0 + }, l + } + } + t.__esModule = !0, t.defaultMemoize = o, t.createSelectorCreator = i, t.createStructuredSelector = function(e) { + var t = arguments.length <= 1 || void 0 === arguments[1] ? 
a : arguments[1]; + if ("object" != typeof e) throw new Error("createStructuredSelector expects first argument to be an object where each property is a selector, instead received a " + typeof e); + var n = Object.keys(e); + return t(n.map(function(t) { + return e[t] + }), function() { + for (var e = arguments.length, t = Array(e), r = 0; r < e; r++) t[r] = arguments[r]; + return t.reduce(function(e, t, r) { + return e[n[r]] = t, e + }, {}) + }) + }; + var a = t.createSelector = i(o) + }, function(e, t, n) { + var r = n(117), + o = n(243); + e.exports = n(100) ? function(e, t, n) { + return r.f(e, t, o(1, n)) + } : function(e, t, n) { + return e[t] = n, e + } + }, function(e, t, n) { + var r = n(74); + e.exports = function(e) { + if (!r(e)) throw TypeError(e + " is not an object!"); + return e + } + }, function(e, t) { + var n = e.exports = { + version: "2.5.5" + }; + "number" == typeof __e && (__e = n) + }, function(e, t, n) { + var r = n(277); + e.exports = function(e) { + return null == e ? "" : r(e) + } + }, function(e, t, n) { + var r = n(77), + o = n(574), + i = n(575), + a = "[object Null]", + u = "[object Undefined]", + s = r ? r.toStringTag : void 0; + e.exports = function(e) { + return null == e ? void 0 === e ? u : a : s && s in Object(e) ? o(e) : i(e) + } + }, function(e, t, n) { + var r = n(592), + o = n(595); + e.exports = function(e, t) { + var n = o(e, t); + return r(n) ? n : void 0 + } + }, function(e, t, n) { + var r = n(295), + o = n(632), + i = n(78); + e.exports = function(e) { + return i(e) ? r(e) : o(e) + } + }, function(e, t, n) { + "use strict"; + var r = n(140), + o = Object.keys || function(e) { + var t = []; + for (var n in e) t.push(n); + return t + }; + e.exports = f; + var i = n(106); + i.inherits = n(81); + var a = n(305), + u = n(195); + i.inherits(f, a); + for (var s = o(u.prototype), l = 0; l < s.length; l++) { + var c = s[l]; + f.prototype[c] || (f.prototype[c] = u.prototype[c]) + } + + function f(e) { + if (!(this instanceof f)) return new f(e); + a.call(this, e), u.call(this, e), e && !1 === e.readable && (this.readable = !1), e && !1 === e.writable && (this.writable = !1), this.allowHalfOpen = !0, e && !1 === e.allowHalfOpen && (this.allowHalfOpen = !1), this.once("end", p) + } + + function p() { + this.allowHalfOpen || this._writableState.ended || r.nextTick(d, this) + } + + function d(e) { + e.end() + } + Object.defineProperty(f.prototype, "writableHighWaterMark", { + enumerable: !1, + get: function() { + return this._writableState.highWaterMark + } + }), Object.defineProperty(f.prototype, "destroyed", { + get: function() { + return void 0 !== this._readableState && void 0 !== this._writableState && (this._readableState.destroyed && this._writableState.destroyed) + }, + set: function(e) { + void 0 !== this._readableState && void 0 !== this._writableState && (this._readableState.destroyed = e, this._writableState.destroyed = e) + } + }), f.prototype._destroy = function(e, t) { + this.push(null), this.end(), r.nextTick(t, e) + } + }, function(e, t, n) { + "use strict"; + var r = n(312)(); + e.exports = function(e) { + return e !== r && null !== e + } + }, function(e, t, n) { + "use strict"; + var r = n(670), + o = Math.max; + e.exports = function(e) { + return o(0, r(e)) + } + }, function(e, t, n) { + "use strict" + }, function(e, t, n) { + "use strict"; + var r = n(11), + o = (n(8), function(e) { + if (this.instancePool.length) { + var t = this.instancePool.pop(); + return this.call(t, e), t + } + return new this(e) + }), + i = function(e) { + e 
instanceof this || r("25"), e.destructor(), this.instancePool.length < this.poolSize && this.instancePool.push(e) + }, + a = o, + u = { + addPoolingTo: function(e, t) { + var n = e; + return n.instancePool = [], n.getPooled = t || a, n.poolSize || (n.poolSize = 10), n.release = i, n + }, + oneArgumentPooler: o, + twoArgumentPooler: function(e, t) { + if (this.instancePool.length) { + var n = this.instancePool.pop(); + return this.call(n, e, t), n + } + return new this(e, t) + }, + threeArgumentPooler: function(e, t, n) { + if (this.instancePool.length) { + var r = this.instancePool.pop(); + return this.call(r, e, t, n), r + } + return new this(e, t, n) + }, + fourArgumentPooler: function(e, t, n, r) { + if (this.instancePool.length) { + var o = this.instancePool.pop(); + return this.call(o, e, t, n, r), o + } + return new this(e, t, n, r) + } + }; + e.exports = u + }, function(e, t) { + e.exports = {} + }, function(e, t, n) { + var r = n(154), + o = n(155); + e.exports = function(e) { + return r(o(e)) + } + }, function(e, t, n) { + var r = n(155); + e.exports = function(e) { + return Object(r(e)) + } + }, function(e, t, n) { + var r = n(33), + o = n(58), + i = n(118), + a = n(167)("src"), + u = Function.toString, + s = ("" + u).split("toString"); + n(60).inspectSource = function(e) { + return u.call(e) + }, (e.exports = function(e, t, n, u) { + var l = "function" == typeof n; + l && (i(n, "name") || o(n, "name", t)), e[t] !== n && (l && (i(n, a) || o(n, a, e[t] ? "" + e[t] : s.join(String(t)))), e === r ? e[t] = n : u ? e[t] ? e[t] = n : o(e, t, n) : (delete e[t], o(e, t, n))) + })(Function.prototype, "toString", function() { + return "function" == typeof this && this[a] || u.call(this) + }) + }, function(e, t) { + e.exports = function(e) { + return "object" == typeof e ? null !== e : "function" == typeof e + } + }, function(e, t, n) { + "use strict"; + var r = n(13), + o = n(264), + i = n(536), + a = n(541), + u = n(76), + s = n(542), + l = n(545), + c = n(546), + f = n(548), + p = u.createElement, + d = u.createFactory, + h = u.cloneElement, + v = r, + m = function(e) { + return e + }, + g = { + Children: { + map: i.map, + forEach: i.forEach, + count: i.count, + toArray: i.toArray, + only: f + }, + Component: o.Component, + PureComponent: o.PureComponent, + createElement: p, + cloneElement: h, + isValidElement: u.isValidElement, + PropTypes: s, + createClass: c, + createFactory: d, + createMixin: m, + DOM: a, + version: l, + __spread: v + }; + e.exports = g + }, function(e, t, n) { + "use strict"; + var r = n(13), + o = n(46), + i = (n(9), n(266), Object.prototype.hasOwnProperty), + a = n(267), + u = { + key: !0, + ref: !0, + __self: !0, + __source: !0 + }; + + function s(e) { + return void 0 !== e.ref + } + + function l(e) { + return void 0 !== e.key + } + var c = function(e, t, n, r, o, i, u) { + var s = { + $$typeof: a, + type: e, + key: t, + ref: n, + props: u, + _owner: i + }; + return s + }; + c.createElement = function(e, t, n) { + var r, a = {}, + f = null, + p = null; + if (null != t) + for (r in s(t) && (p = t.ref), l(t) && (f = "" + t.key), void 0 === t.__self ? null : t.__self, void 0 === t.__source ? 
null : t.__source, t) i.call(t, r) && !u.hasOwnProperty(r) && (a[r] = t[r]); + var d = arguments.length - 2; + if (1 === d) a.children = n; + else if (d > 1) { + for (var h = Array(d), v = 0; v < d; v++) h[v] = arguments[v + 2]; + 0, a.children = h + } + if (e && e.defaultProps) { + var m = e.defaultProps; + for (r in m) void 0 === a[r] && (a[r] = m[r]) + } + return c(e, f, p, 0, 0, o.current, a) + }, c.createFactory = function(e) { + var t = c.createElement.bind(null, e); + return t.type = e, t + }, c.cloneAndReplaceKey = function(e, t) { + return c(e.type, t, e.ref, e._self, e._source, e._owner, e.props) + }, c.cloneElement = function(e, t, n) { + var a, f, p = r({}, e.props), + d = e.key, + h = e.ref, + v = (e._self, e._source, e._owner); + if (null != t) + for (a in s(t) && (h = t.ref, v = o.current), l(t) && (d = "" + t.key), e.type && e.type.defaultProps && (f = e.type.defaultProps), t) i.call(t, a) && !u.hasOwnProperty(a) && (void 0 === t[a] && void 0 !== f ? p[a] = f[a] : p[a] = t[a]); + var m = arguments.length - 2; + if (1 === m) p.children = n; + else if (m > 1) { + for (var g = Array(m), y = 0; y < m; y++) g[y] = arguments[y + 2]; + p.children = g + } + return c(e.type, d, h, 0, 0, v, p) + }, c.isValidElement = function(e) { + return "object" == typeof e && null !== e && e.$$typeof === a + }, e.exports = c + }, function(e, t, n) { + var r = n(37).Symbol; + e.exports = r + }, function(e, t, n) { + var r = n(285), + o = n(187); + e.exports = function(e) { + return null != e && o(e.length) && !r(e) + } + }, function(e, t, n) { + var r = n(24), + o = n(190), + i = n(640), + a = n(61); + e.exports = function(e, t) { + return r(e) ? e : o(e, t) ? [e] : i(a(e)) + } + }, function(e, t, n) { + var r = n(128), + o = 1 / 0; + e.exports = function(e) { + if ("string" == typeof e || r(e)) return e; + var t = e + ""; + return "0" == t && 1 / e == -o ? "-0" : t + } + }, function(e, t) { + "function" == typeof Object.create ? e.exports = function(e, t) { + e.super_ = t, e.prototype = Object.create(t.prototype, { + constructor: { + value: e, + enumerable: !1, + writable: !0, + configurable: !0 + } + }) + } : e.exports = function(e, t) { + e.super_ = t; + var n = function() {}; + n.prototype = t.prototype, e.prototype = new n, e.prototype.constructor = e + } + }, function(e, t, n) { + "use strict"; + var r = n(66); + e.exports = function(e) { + if (!r(e)) throw new TypeError("Cannot use null or undefined"); + return e + } + }, function(e, t, n) { + "use strict"; + t.__esModule = !0; + var r, o = n(728), + i = (r = o) && r.__esModule ? r : { + default: r + }; + t.default = function(e) { + if (Array.isArray(e)) { + for (var t = 0, n = Array(e.length); t < e.length; t++) n[t] = e[t]; + return n + } + return (0, i.default)(e) + } + }, function(e, t, n) { + "use strict"; + t.__esModule = !0, t.default = function(e, t) { + var n = {}; + for (var r in e) t.indexOf(r) >= 0 || Object.prototype.hasOwnProperty.call(e, r) && (n[r] = e[r]); + return n + } + }, function(e, t, n) { + "use strict"; + + function r(e) { + return void 0 === e || null === e + } + e.exports.isNothing = r, e.exports.isObject = function(e) { + return "object" == typeof e && null !== e + }, e.exports.toArray = function(e) { + return Array.isArray(e) ? e : r(e) ? 
[] : [e] + }, e.exports.repeat = function(e, t) { + var n, r = ""; + for (n = 0; n < t; n += 1) r += e; + return r + }, e.exports.isNegativeZero = function(e) { + return 0 === e && Number.NEGATIVE_INFINITY === 1 / e + }, e.exports.extend = function(e, t) { + var n, r, o, i; + if (t) + for (n = 0, r = (i = Object.keys(t)).length; n < r; n += 1) e[o = i[n]] = t[o]; + return e + } + }, function(e, t, n) { + "use strict"; + var r = n(85), + o = n(107), + i = n(16); + + function a(e, t, n) { + var r = []; + return e.include.forEach(function(e) { + n = a(e, t, n) + }), e[t].forEach(function(e) { + n.forEach(function(t, n) { + t.tag === e.tag && t.kind === e.kind && r.push(n) + }), n.push(e) + }), n.filter(function(e, t) { + return -1 === r.indexOf(t) + }) + } + + function u(e) { + this.include = e.include || [], this.implicit = e.implicit || [], this.explicit = e.explicit || [], this.implicit.forEach(function(e) { + if (e.loadKind && "scalar" !== e.loadKind) throw new o("There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.") + }), this.compiledImplicit = a(this, "implicit", []), this.compiledExplicit = a(this, "explicit", []), this.compiledTypeMap = function() { + var e, t, n = { + scalar: {}, + sequence: {}, + mapping: {}, + fallback: {} + }; + + function r(e) { + n[e.kind][e.tag] = n.fallback[e.tag] = e + } + for (e = 0, t = arguments.length; e < t; e += 1) arguments[e].forEach(r); + return n + }(this.compiledImplicit, this.compiledExplicit) + } + u.DEFAULT = null, u.create = function() { + var e, t; + switch (arguments.length) { + case 1: + e = u.DEFAULT, t = arguments[0]; + break; + case 2: + e = arguments[0], t = arguments[1]; + break; + default: + throw new o("Wrong number of arguments for Schema.create function") + } + if (e = r.toArray(e), t = r.toArray(t), !e.every(function(e) { + return e instanceof u + })) throw new o("Specified list of super schemas (or a single Schema object) contains a non-Schema object."); + if (!t.every(function(e) { + return e instanceof i + })) throw new o("Specified list of YAML types (or a single Type object) contains a non-Type object."); + return new u({ + include: e, + explicit: t + }) + }, e.exports = u + }, function(e, t, n) { + "use strict"; + var r = n(11); + n(8); + + function o(e, t) { + return (e & t) === t + } + var i = { + MUST_USE_PROPERTY: 1, + HAS_BOOLEAN_VALUE: 4, + HAS_NUMERIC_VALUE: 8, + HAS_POSITIVE_NUMERIC_VALUE: 24, + HAS_OVERLOADED_BOOLEAN_VALUE: 32, + injectDOMPropertyConfig: function(e) { + var t = i, + n = e.Properties || {}, + a = e.DOMAttributeNamespaces || {}, + s = e.DOMAttributeNames || {}, + l = e.DOMPropertyNames || {}, + c = e.DOMMutationMethods || {}; + for (var f in e.isCustomAttribute && u._isCustomAttributeFunctions.push(e.isCustomAttribute), n) { + u.properties.hasOwnProperty(f) && r("48", f); + var p = f.toLowerCase(), + d = n[f], + h = { + attributeName: p, + attributeNamespace: null, + propertyName: f, + mutationMethod: null, + mustUseProperty: o(d, t.MUST_USE_PROPERTY), + hasBooleanValue: o(d, t.HAS_BOOLEAN_VALUE), + hasNumericValue: o(d, t.HAS_NUMERIC_VALUE), + hasPositiveNumericValue: o(d, t.HAS_POSITIVE_NUMERIC_VALUE), + hasOverloadedBooleanValue: o(d, t.HAS_OVERLOADED_BOOLEAN_VALUE) + }; + if (h.hasBooleanValue + h.hasNumericValue + h.hasOverloadedBooleanValue <= 1 || r("50", f), s.hasOwnProperty(f)) { + var v = s[f]; + h.attributeName = v + } + a.hasOwnProperty(f) && (h.attributeNamespace = a[f]), l.hasOwnProperty(f) && (h.propertyName = l[f]), 
c.hasOwnProperty(f) && (h.mutationMethod = c[f]), u.properties[f] = h + } + } + }, + a = ":A-Z_a-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", + u = { + ID_ATTRIBUTE_NAME: "data-reactid", + ROOT_ATTRIBUTE_NAME: "data-reactroot", + ATTRIBUTE_NAME_START_CHAR: a, + ATTRIBUTE_NAME_CHAR: a + "\\-.0-9\\u00B7\\u0300-\\u036F\\u203F-\\u2040", + properties: {}, + getPossibleStandardName: null, + _isCustomAttributeFunctions: [], + isCustomAttribute: function(e) { + for (var t = 0; t < u._isCustomAttributeFunctions.length; t++) { + if ((0, u._isCustomAttributeFunctions[t])(e)) return !0 + } + return !1 + }, + injection: i + }; + e.exports = u + }, function(e, t, n) { + "use strict"; + var r = n(783); + n(39), n(9); + + function o() { + r.attachRefs(this, this._currentElement) + } + var i = { + mountComponent: function(e, t, n, r, i, a) { + var u = e.mountComponent(t, n, r, i, a); + return e._currentElement && null != e._currentElement.ref && t.getReactMountReady().enqueue(o, e), u + }, + getHostNode: function(e) { + return e.getHostNode() + }, + unmountComponent: function(e, t) { + r.detachRefs(e, e._currentElement), e.unmountComponent(t) + }, + receiveComponent: function(e, t, n, i) { + var a = e._currentElement; + if (t !== a || i !== e._context) { + 0; + var u = r.shouldUpdateRefs(a, t); + u && r.detachRefs(e, a), e.receiveComponent(t, n, i), u && e._currentElement && null != e._currentElement.ref && n.getReactMountReady().enqueue(o, e) + } + }, + performUpdateIfNecessary: function(e, t, n) { + e._updateBatchNumber === n && e.performUpdateIfNecessary(t) + } + }; + e.exports = i + }, function(e, t, n) { + "use strict"; + var r = n(217), + o = n(149), + i = n(218), + a = n(359), + u = "undefined" != typeof document && "number" == typeof document.documentMode || "undefined" != typeof navigator && "string" == typeof navigator.userAgent && /\bEdge\/\d/.test(navigator.userAgent); + + function s(e) { + if (u) { + var t = e.node, + n = e.children; + if (n.length) + for (var r = 0; r < n.length; r++) l(t, n[r], null); + else null != e.html ? o(t, e.html) : null != e.text && a(t, e.text) + } + } + var l = i(function(e, t, n) { + 11 === t.node.nodeType || 1 === t.node.nodeType && "object" === t.node.nodeName.toLowerCase() && (null == t.node.namespaceURI || t.node.namespaceURI === r.html) ? (s(t), e.insertBefore(t.node, n)) : (e.insertBefore(t.node, n), s(t)) + }); + + function c() { + return this.node.nodeName + } + + function f(e) { + return { + node: e, + children: [], + html: null, + text: null, + toString: c + } + } + f.insertTreeBefore = l, f.replaceChildWithTree = function(e, t) { + e.parentNode.replaceChild(t.node, e), s(t) + }, f.queueChild = function(e, t) { + u ? e.children.push(t) : e.node.appendChild(t.node) + }, f.queueHTML = function(e, t) { + u ? e.html = t : o(e.node, t) + }, f.queueText = function(e, t) { + u ? e.text = t : a(e.node, t) + }, e.exports = f + }, function(e, t, n) { + var r = n(146), + o = n(344); + e.exports = function(e, t, n, i) { + var a = !n; + n || (n = {}); + for (var u = -1, s = t.length; ++u < s;) { + var l = t[u], + c = i ? i(n[l], e[l], l, n, e) : void 0; + void 0 === c && (c = e[l]), a ? 
o(n, l, c) : r(n, l, c) + } + return n + } + }, function(e, t, n) { + e.exports = { + default: n(447), + __esModule: !0 + } + }, function(e, t, n) { + n(448); + for (var r = n(21), o = n(50), i = n(70), a = n(19)("toStringTag"), u = "CSSRuleList,CSSStyleDeclaration,CSSValueList,ClientRectList,DOMRectList,DOMStringList,DOMTokenList,DataTransferItemList,FileList,HTMLAllCollection,HTMLCollection,HTMLFormElement,HTMLSelectElement,MediaList,MimeTypeArray,NamedNodeMap,NodeList,PaintRequestList,Plugin,PluginArray,SVGLengthList,SVGNumberList,SVGPathSegList,SVGPointList,SVGStringList,SVGTransformList,SourceBufferList,StyleSheetList,TextTrackCueList,TextTrackList,TouchList".split(","), s = 0; s < u.length; s++) { + var l = u[s], + c = r[l], + f = c && c.prototype; + f && !f[a] && o(f, a, l), i[l] = i.Array + } + }, function(e, t) { + var n = {}.toString; + e.exports = function(e) { + return n.call(e).slice(8, -1) + } + }, function(e, t) { + e.exports = function(e) { + if ("function" != typeof e) throw TypeError(e + " is not a function!"); + return e + } + }, function(e, t) { + e.exports = function(e, t) { + return { + enumerable: !(1 & e), + configurable: !(2 & e), + writable: !(4 & e), + value: t + } + } + }, function(e, t, n) { + var r = n(239), + o = n(163); + e.exports = Object.keys || function(e) { + return r(e, o) + } + }, function(e, t, n) { + var r = n(40).f, + o = n(52), + i = n(19)("toStringTag"); + e.exports = function(e, t, n) { + e && !o(e = n ? e : e.prototype, i) && r(e, i, { + configurable: !0, + value: t + }) + } + }, function(e, t, n) { + "use strict"; + var r = n(455)(!0); + n(237)(String, "String", function(e) { + this._t = String(e), this._i = 0 + }, function() { + var e, t = this._t, + n = this._i; + return n >= t.length ? { + value: void 0, + done: !0 + } : (e = r(t, n), this._i += e.length, { + value: e, + done: !1 + }) + }) + }, function(e, t) { + var n = {}.toString; + e.exports = function(e) { + return n.call(e).slice(8, -1) + } + }, function(e, t, n) { + e.exports = !n(101)(function() { + return 7 != Object.defineProperty({}, "a", { + get: function() { + return 7 + } + }).a + }) + }, function(e, t) { + e.exports = function(e) { + try { + return !!e() + } catch (e) { + return !0 + } + } + }, function(e, t) { + e.exports = {} + }, function(e, t, n) { + var r = n(119), + o = Math.min; + e.exports = function(e) { + return e > 0 ? o(r(e), 9007199254740991) : 0 + } + }, function(e, t, n) { + "use strict"; + e.exports = function(e) { + for (var t = arguments.length - 1, n = "Minified React error #" + e + "; visit http://facebook.github.io/react/docs/error-decoder.html?invariant=" + e, r = 0; r < t; r++) n += "&args[]=" + encodeURIComponent(arguments[r + 1]); + n += " for the full message or use the non-minified dev environment for full errors and additional helpful warnings."; + var o = new Error(n); + throw o.name = "Invariant Violation", o.framesToPop = 1, o + } + }, function(e, t) { + e.exports = function(e, t) { + return e === t || e != e && t != t + } + }, function(e, t, n) { + (function(e) { + function n(e) { + return Object.prototype.toString.call(e) + } + t.isArray = function(e) { + return Array.isArray ? 
Array.isArray(e) : "[object Array]" === n(e) + }, t.isBoolean = function(e) { + return "boolean" == typeof e + }, t.isNull = function(e) { + return null === e + }, t.isNullOrUndefined = function(e) { + return null == e + }, t.isNumber = function(e) { + return "number" == typeof e + }, t.isString = function(e) { + return "string" == typeof e + }, t.isSymbol = function(e) { + return "symbol" == typeof e + }, t.isUndefined = function(e) { + return void 0 === e + }, t.isRegExp = function(e) { + return "[object RegExp]" === n(e) + }, t.isObject = function(e) { + return "object" == typeof e && null !== e + }, t.isDate = function(e) { + return "[object Date]" === n(e) + }, t.isError = function(e) { + return "[object Error]" === n(e) || e instanceof Error + }, t.isFunction = function(e) { + return "function" == typeof e + }, t.isPrimitive = function(e) { + return null === e || "boolean" == typeof e || "number" == typeof e || "string" == typeof e || "symbol" == typeof e || void 0 === e + }, t.isBuffer = e.isBuffer + }).call(t, n(54).Buffer) + }, function(e, t, n) { + "use strict"; + + function r(e, t) { + Error.call(this), this.name = "YAMLException", this.reason = e, this.mark = t, this.message = (this.reason || "(unknown reason)") + (this.mark ? " " + this.mark.toString() : ""), Error.captureStackTrace ? Error.captureStackTrace(this, this.constructor) : this.stack = (new Error).stack || "" + } + r.prototype = Object.create(Error.prototype), r.prototype.constructor = r, r.prototype.toString = function(e) { + var t = this.name + ": "; + return t += this.reason || "(unknown reason)", !e && this.mark && (t += " " + this.mark.toString()), t + }, e.exports = r + }, function(e, t, n) { + "use strict"; + var r = n(86); + e.exports = new r({ + include: [n(340)], + implicit: [n(751), n(752)], + explicit: [n(753), n(754), n(755), n(756)] + }) + }, function(e, t, n) { + "use strict"; + var r = n(110), + o = n(211), + i = n(351), + a = n(352), + u = (n(9), r.getListener); + + function s(e, t, n) { + var r = function(e, t, n) { + var r = t.dispatchConfig.phasedRegistrationNames[n]; + return u(e, r) + }(e, n, t); + r && (n._dispatchListeners = i(n._dispatchListeners, r), n._dispatchInstances = i(n._dispatchInstances, e)) + } + + function l(e) { + e && e.dispatchConfig.phasedRegistrationNames && o.traverseTwoPhase(e._targetInst, s, e) + } + + function c(e) { + if (e && e.dispatchConfig.phasedRegistrationNames) { + var t = e._targetInst, + n = t ? 
o.getParentInstance(t) : null; + o.traverseTwoPhase(n, s, e) + } + } + + function f(e, t, n) { + if (n && n.dispatchConfig.registrationName) { + var r = n.dispatchConfig.registrationName, + o = u(e, r); + o && (n._dispatchListeners = i(n._dispatchListeners, o), n._dispatchInstances = i(n._dispatchInstances, e)) + } + } + + function p(e) { + e && e.dispatchConfig.registrationName && f(e._targetInst, 0, e) + } + var d = { + accumulateTwoPhaseDispatches: function(e) { + a(e, l) + }, + accumulateTwoPhaseDispatchesSkipTarget: function(e) { + a(e, c) + }, + accumulateDirectDispatches: function(e) { + a(e, p) + }, + accumulateEnterLeaveDispatches: function(e, t, n, r) { + o.traverseEnterLeave(n, r, f, e, t) + } + }; + e.exports = d + }, function(e, t, n) { + "use strict"; + var r = n(11), + o = n(210), + i = n(211), + a = n(212), + u = n(351), + s = n(352), + l = (n(8), {}), + c = null, + f = function(e, t) { + e && (i.executeDispatchesInOrder(e, t), e.isPersistent() || e.constructor.release(e)) + }, + p = function(e) { + return f(e, !0) + }, + d = function(e) { + return f(e, !1) + }, + h = function(e) { + return "." + e._rootNodeID + }; + var v = { + injection: { + injectEventPluginOrder: o.injectEventPluginOrder, + injectEventPluginsByName: o.injectEventPluginsByName + }, + putListener: function(e, t, n) { + "function" != typeof n && r("94", t, typeof n); + var i = h(e); + (l[t] || (l[t] = {}))[i] = n; + var a = o.registrationNameModules[t]; + a && a.didPutListener && a.didPutListener(e, t, n) + }, + getListener: function(e, t) { + var n = l[t]; + if (function(e, t, n) { + switch (e) { + case "onClick": + case "onClickCapture": + case "onDoubleClick": + case "onDoubleClickCapture": + case "onMouseDown": + case "onMouseDownCapture": + case "onMouseMove": + case "onMouseMoveCapture": + case "onMouseUp": + case "onMouseUpCapture": + return !(!n.disabled || (r = t, "button" !== r && "input" !== r && "select" !== r && "textarea" !== r)); + default: + return !1 + } + var r + }(t, e._currentElement.type, e._currentElement.props)) return null; + var r = h(e); + return n && n[r] + }, + deleteListener: function(e, t) { + var n = o.registrationNameModules[t]; + n && n.willDeleteListener && n.willDeleteListener(e, t); + var r = l[t]; + r && delete r[h(e)] + }, + deleteAllListeners: function(e) { + var t = h(e); + for (var n in l) + if (l.hasOwnProperty(n) && l[n][t]) { + var r = o.registrationNameModules[n]; + r && r.willDeleteListener && r.willDeleteListener(e, n), delete l[n][t] + } + }, + extractEvents: function(e, t, n, r) { + for (var i, a = o.plugins, s = 0; s < a.length; s++) { + var l = a[s]; + if (l) { + var c = l.extractEvents(e, t, n, r); + c && (i = u(i, c)) + } + } + return i + }, + enqueueEvents: function(e) { + e && (c = u(c, e)) + }, + processEventQueue: function(e) { + var t = c; + c = null, s(t, e ? p : d), c && r("95"), a.rethrowCaughtError() + }, + __purge: function() { + l = {} + }, + __getListenerBank: function() { + return l + } + }; + e.exports = v + }, function(e, t, n) { + "use strict"; + var r = n(48), + o = n(213), + i = { + view: function(e) { + if (e.view) return e.view; + var t = o(e); + if (t.window === t) return t; + var n = t.ownerDocument; + return n ? 
n.defaultView || n.parentWindow : window + }, + detail: function(e) { + return e.detail || 0 + } + }; + + function a(e, t, n, o) { + return r.call(this, e, t, n, o) + } + r.augmentClass(a, i), e.exports = a + }, function(e, t, n) { + "use strict"; + var r = { + remove: function(e) { + e._reactInternalInstance = void 0 + }, + get: function(e) { + return e._reactInternalInstance + }, + has: function(e) { + return void 0 !== e._reactInternalInstance + }, + set: function(e, t) { + e._reactInternalInstance = t + } + }; + e.exports = r + }, function(e, t, n) { + var r; + /*! + Copyright (c) 2016 Jed Watson. + Licensed under the MIT License (MIT), see + http://jedwatson.github.io/classnames + */ + /*! + Copyright (c) 2016 Jed Watson. + Licensed under the MIT License (MIT), see + http://jedwatson.github.io/classnames + */ + ! function() { + "use strict"; + var n = {}.hasOwnProperty; + + function o() { + for (var e = [], t = 0; t < arguments.length; t++) { + var r = arguments[t]; + if (r) { + var i = typeof r; + if ("string" === i || "number" === i) e.push(r); + else if (Array.isArray(r)) e.push(o.apply(null, r)); + else if ("object" === i) + for (var a in r) n.call(r, a) && r[a] && e.push(a) + } + } + return e.join(" ") + } + void 0 !== e && e.exports ? e.exports = o : void 0 === (r = function() { + return o + }.apply(t, [])) || (e.exports = r) + }() + }, function(e, t) { + e.exports = !0 + }, function(e, t, n) { + var r = n(160), + o = Math.min; + e.exports = function(e) { + return e > 0 ? o(r(e), 9007199254740991) : 0 + } + }, function(e, t) { + var n = 0, + r = Math.random(); + e.exports = function(e) { + return "Symbol(".concat(void 0 === e ? "" : e, ")_", (++n + r).toString(36)) + } + }, function(e, t, n) { + var r = n(59), + o = n(459), + i = n(460), + a = Object.defineProperty; + t.f = n(100) ? Object.defineProperty : function(e, t, n) { + if (r(e), t = i(t, !0), r(n), o) try { + return a(e, t, n) + } catch (e) {} + if ("get" in n || "set" in n) throw TypeError("Accessors not supported!"); + return "value" in n && (e[t] = n.value), e + } + }, function(e, t) { + var n = {}.hasOwnProperty; + e.exports = function(e, t) { + return n.call(e, t) + } + }, function(e, t) { + var n = Math.ceil, + r = Math.floor; + e.exports = function(e) { + return isNaN(e = +e) ? 0 : (e > 0 ? r : n)(e) + } + }, function(e, t, n) { + var r = n(121); + e.exports = function(e, t, n) { + if (r(e), void 0 === t) return e; + switch (n) { + case 1: + return function(n) { + return e.call(t, n) + }; + case 2: + return function(n, r) { + return e.call(t, n, r) + }; + case 3: + return function(n, r, o) { + return e.call(t, n, r, o) + } + } + return function() { + return e.apply(t, arguments) + } + } + }, function(e, t) { + e.exports = function(e) { + if ("function" != typeof e) throw TypeError(e + " is not a function!"); + return e + } + }, function(e, t, n) { + var r = n(465), + o = n(53); + e.exports = function(e) { + return r(o(e)) + } + }, function(e, t, n) { + "use strict"; + var r = n(58), + o = n(73), + i = n(101), + a = n(53), + u = n(17); + e.exports = function(e, t, n) { + var s = u(e), + l = n(a, s, "" [e]), + c = l[0], + f = l[1]; + i(function() { + var t = {}; + return t[s] = function() { + return 7 + }, 7 != "" [e](t) + }) && (o(String.prototype, e, c), r(RegExp.prototype, s, 2 == t ? 
function(e, t) { + return f.call(e, this, t) + } : function(e) { + return f.call(e, this) + })) + } + }, function(e, t, n) { + var r = n(116)("meta"), + o = n(28), + i = n(52), + a = n(40).f, + u = 0, + s = Object.isExtensible || function() { + return !0 + }, + l = !n(51)(function() { + return s(Object.preventExtensions({})) + }), + c = function(e) { + a(e, r, { + value: { + i: "O" + ++u, + w: {} + } + }) + }, + f = e.exports = { + KEY: r, + NEED: !1, + fastKey: function(e, t) { + if (!o(e)) return "symbol" == typeof e ? e : ("string" == typeof e ? "S" : "P") + e; + if (!i(e, r)) { + if (!s(e)) return "F"; + if (!t) return "E"; + c(e) + } + return e[r].i + }, + getWeak: function(e, t) { + if (!i(e, r)) { + if (!s(e)) return !0; + if (!t) return !1; + c(e) + } + return e[r].w + }, + onFreeze: function(e) { + return l && f.NEED && s(e) && !i(e, r) && c(e), e + } + } + }, function(e, t) { + t.f = {}.propertyIsEnumerable + }, function(e, t, n) { + "use strict"; + var r = {}; + e.exports = r + }, function(e, t, n) { + "use strict"; + Object.defineProperty(t, "__esModule", { + value: !0 + }), t.CLEAR_BY = t.CLEAR = t.NEW_AUTH_ERR = t.NEW_SPEC_ERR_BATCH = t.NEW_SPEC_ERR = t.NEW_THROWN_ERR_BATCH = t.NEW_THROWN_ERR = void 0, t.newThrownErr = function(e) { + return { + type: a, + payload: (0, i.default)(e) + } + }, t.newThrownErrBatch = function(e) { + return { + type: u, + payload: e + } + }, t.newSpecErr = function(e) { + return { + type: s, + payload: e + } + }, t.newSpecErrBatch = function(e) { + return { + type: l, + payload: e + } + }, t.newAuthErr = function(e) { + return { + type: c, + payload: e + } + }, t.clear = function() { + var e = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {}; + return { + type: f, + payload: e + } + }, t.clearBy = function() { + var e = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : function() { + return !0 + }; + return { + type: p, + payload: e + } + }; + var r, o = n(179), + i = (r = o) && r.__esModule ? r : { + default: r + }; + var a = t.NEW_THROWN_ERR = "err_new_thrown_err", + u = t.NEW_THROWN_ERR_BATCH = "err_new_thrown_err_batch", + s = t.NEW_SPEC_ERR = "err_new_spec_err", + l = t.NEW_SPEC_ERR_BATCH = "err_new_spec_err_batch", + c = t.NEW_AUTH_ERR = "err_new_auth_err", + f = t.CLEAR = "err_clear", + p = t.CLEAR_BY = "err_clear_by" + }, function(e, t, n) { + var r = n(62), + o = n(47), + i = "[object Symbol]"; + e.exports = function(e) { + return "symbol" == typeof e || o(e) && r(e) == i + } + }, function(e, t, n) { + var r = n(63)(Object, "create"); + e.exports = r + }, function(e, t, n) { + var r = n(600), + o = n(601), + i = n(602), + a = n(603), + u = n(604); + + function s(e) { + var t = -1, + n = null == e ? 0 : e.length; + for (this.clear(); ++t < n;) { + var r = e[t]; + this.set(r[0], r[1]) + } + } + s.prototype.clear = r, s.prototype.delete = o, s.prototype.get = i, s.prototype.has = a, s.prototype.set = u, e.exports = s + }, function(e, t, n) { + var r = n(105); + e.exports = function(e, t) { + for (var n = e.length; n--;) + if (r(e[n][0], t)) return n; + return -1 + } + }, function(e, t, n) { + var r = n(606); + e.exports = function(e, t) { + var n = e.__data__; + return r(t) ? n["string" == typeof t ? "string" : "hash"] : n.map + } + }, function(e, t, n) { + var r = n(611), + o = n(639), + i = n(191), + a = n(24), + u = n(644); + e.exports = function(e) { + return "function" == typeof e ? e : null == e ? i : "object" == typeof e ? a(e) ? 
o(e[0], e[1]) : r(e) : u(e) + } + }, function(e, t) { + e.exports = function(e) { + return e.webpackPolyfill || (e.deprecate = function() {}, e.paths = [], e.children || (e.children = []), Object.defineProperty(e, "loaded", { + enumerable: !0, + get: function() { + return e.l + } + }), Object.defineProperty(e, "id", { + enumerable: !0, + get: function() { + return e.i + } + }), e.webpackPolyfill = 1), e + } + }, function(e, t) { + var n = 9007199254740991, + r = /^(?:0|[1-9]\d*)$/; + e.exports = function(e, t) { + var o = typeof e; + return !!(t = null == t ? n : t) && ("number" == o || "symbol" != o && r.test(e)) && e > -1 && e % 1 == 0 && e < t + } + }, function(e, t) { + var n = Object.prototype; + e.exports = function(e) { + var t = e && e.constructor; + return e === ("function" == typeof t && t.prototype || n) + } + }, function(e, t, n) { + var r = n(634), + o = n(181), + i = n(635), + a = n(636), + u = n(637), + s = n(62), + l = n(286), + c = l(r), + f = l(o), + p = l(i), + d = l(a), + h = l(u), + v = s; + (r && "[object DataView]" != v(new r(new ArrayBuffer(1))) || o && "[object Map]" != v(new o) || i && "[object Promise]" != v(i.resolve()) || a && "[object Set]" != v(new a) || u && "[object WeakMap]" != v(new u)) && (v = function(e) { + var t = s(e), + n = "[object Object]" == t ? e.constructor : void 0, + r = n ? l(n) : ""; + if (r) switch (r) { + case c: + return "[object DataView]"; + case f: + return "[object Map]"; + case p: + return "[object Promise]"; + case d: + return "[object Set]"; + case h: + return "[object WeakMap]" + } + return t + }), e.exports = v + }, function(e, t, n) { + var r = n(139); + e.exports = function(e, t, n) { + var o = null == e ? void 0 : r(e, t); + return void 0 === o ? n : o + } + }, function(e, t, n) { + var r = n(79), + o = n(80); + e.exports = function(e, t) { + for (var n = 0, i = (t = r(t, e)).length; null != e && n < i;) e = e[o(t[n++])]; + return n && n == i ? e : void 0 + } + }, function(e, t, n) { + "use strict"; + (function(t) { + !t.version || 0 === t.version.indexOf("v0.") || 0 === t.version.indexOf("v1.") && 0 !== t.version.indexOf("v1.8.") ? e.exports = { + nextTick: function(e, n, r, o) { + if ("function" != typeof e) throw new TypeError('"callback" argument must be a function'); + var i, a, u = arguments.length; + switch (u) { + case 0: + case 1: + return t.nextTick(e); + case 2: + return t.nextTick(function() { + e.call(null, n) + }); + case 3: + return t.nextTick(function() { + e.call(null, n, r) + }); + case 4: + return t.nextTick(function() { + e.call(null, n, r, o) + }); + default: + for (i = new Array(u - 1), a = 0; a < i.length;) i[a++] = arguments[a]; + return t.nextTick(function() { + e.apply(null, i) + }) + } + } + } : e.exports = t + }).call(t, n(55)) + }, function(e, t, n) { + var r = n(54), + o = r.Buffer; + + function i(e, t) { + for (var n in e) t[n] = e[n] + } + + function a(e, t, n) { + return o(e, t, n) + } + o.from && o.alloc && o.allocUnsafe && o.allocUnsafeSlow ? e.exports = r : (i(r, t), t.Buffer = a), i(o, a), a.from = function(e, t, n) { + if ("number" == typeof e) throw new TypeError("Argument must not be a number"); + return o(e, t, n) + }, a.alloc = function(e, t, n) { + if ("number" != typeof e) throw new TypeError("Argument must be a number"); + var r = o(e); + return void 0 !== t ? "string" == typeof n ? 
r.fill(t, n) : r.fill(t) : r.fill(0), r + }, a.allocUnsafe = function(e) { + if ("number" != typeof e) throw new TypeError("Argument must be a number"); + return o(e) + }, a.allocUnsafeSlow = function(e) { + if ("number" != typeof e) throw new TypeError("Argument must be a number"); + return r.SlowBuffer(e) + } + }, function(e, t, n) { + "use strict"; + e.exports = n(675)("forEach") + }, function(e, t, n) { + "use strict"; + var r = n(314), + o = n(311), + i = n(196), + a = n(684); + (e.exports = function(e, t) { + var n, i, u, s, l; + return arguments.length < 2 || "string" != typeof e ? (s = t, t = e, e = null) : s = arguments[2], null == e ? (n = u = !0, i = !1) : (n = a.call(e, "c"), i = a.call(e, "e"), u = a.call(e, "w")), l = { + value: t, + configurable: n, + enumerable: i, + writable: u + }, s ? r(o(s), l) : l + }).gs = function(e, t, n) { + var u, s, l, c; + return "string" != typeof e ? (l = n, n = t, t = e, e = null) : l = arguments[3], null == t ? t = void 0 : i(t) ? null == n ? n = void 0 : i(n) || (l = n, n = void 0) : (l = t, t = n = void 0), null == e ? (u = !0, s = !1) : (u = a.call(e, "c"), s = a.call(e, "e")), c = { + get: t, + set: n, + configurable: u, + enumerable: s + }, l ? r(o(l), c) : c + } + }, function(e, t, n) { + var r = n(49), + o = n(329), + i = n(330), + a = n(36), + u = n(115), + s = n(164), + l = {}, + c = {}; + (t = e.exports = function(e, t, n, f, p) { + var d, h, v, m, g = p ? function() { + return e + } : s(e), + y = r(n, f, t ? 2 : 1), + b = 0; + if ("function" != typeof g) throw TypeError(e + " is not iterable!"); + if (i(g)) { + for (d = u(e.length); d > b; b++) + if ((m = t ? y(a(h = e[b])[0], h[1]) : y(e[b])) === l || m === c) return m + } else + for (v = g.call(e); !(h = v.next()).done;) + if ((m = o(v, y, h.value, t)) === l || m === c) return m + }).BREAK = l, t.RETURN = c + }, function(e, t, n) { + "use strict"; + var r = n(86); + e.exports = r.DEFAULT = new r({ + include: [n(108)], + explicit: [n(757), n(758), n(759)] + }) + }, function(e, t, n) { + var r = n(344), + o = n(105), + i = Object.prototype.hasOwnProperty; + e.exports = function(e, t, n) { + var a = e[t]; + i.call(e, t) && o(a, n) && (void 0 !== n || t in e) || r(e, t, n) + } + }, function(e, t, n) { + "use strict"; + var r = n(11), + o = (n(8), {}), + i = { + reinitializeTransaction: function() { + this.transactionWrappers = this.getTransactionWrappers(), this.wrapperInitData ? this.wrapperInitData.length = 0 : this.wrapperInitData = [], this._isInTransaction = !1 + }, + _isInTransaction: !1, + getTransactionWrappers: null, + isInTransaction: function() { + return !!this._isInTransaction + }, + perform: function(e, t, n, o, i, a, u, s) { + var l, c; + this.isInTransaction() && r("27"); + try { + this._isInTransaction = !0, l = !0, this.initializeAll(0), c = e.call(t, n, o, i, a, u, s), l = !1 + } finally { + try { + if (l) try { + this.closeAll(0) + } catch (e) {} else this.closeAll(0) + } finally { + this._isInTransaction = !1 + } + } + return c + }, + initializeAll: function(e) { + for (var t = this.transactionWrappers, n = e; n < t.length; n++) { + var r = t[n]; + try { + this.wrapperInitData[n] = o, this.wrapperInitData[n] = r.initialize ? 
r.initialize.call(this) : null + } finally { + if (this.wrapperInitData[n] === o) try { + this.initializeAll(n + 1) + } catch (e) {} + } + } + }, + closeAll: function(e) { + this.isInTransaction() || r("28"); + for (var t = this.transactionWrappers, n = e; n < t.length; n++) { + var i, a = t[n], + u = this.wrapperInitData[n]; + try { + i = !0, u !== o && a.close && a.close.call(this, u), i = !1 + } finally { + if (i) try { + this.closeAll(n + 1) + } catch (e) {} + } + } + this.wrapperInitData.length = 0 + } + }; + e.exports = i + }, function(e, t, n) { + "use strict"; + var r = n(111), + o = n(358), + i = { + screenX: null, + screenY: null, + clientX: null, + clientY: null, + ctrlKey: null, + shiftKey: null, + altKey: null, + metaKey: null, + getModifierState: n(215), + button: function(e) { + var t = e.button; + return "which" in e ? t : 2 === t ? 2 : 4 === t ? 1 : 0 + }, + buttons: null, + relatedTarget: function(e) { + return e.relatedTarget || (e.fromElement === e.srcElement ? e.toElement : e.fromElement) + }, + pageX: function(e) { + return "pageX" in e ? e.pageX : e.clientX + o.currentScrollLeft + }, + pageY: function(e) { + return "pageY" in e ? e.pageY : e.clientY + o.currentScrollTop + } + }; + + function a(e, t, n, o) { + return r.call(this, e, t, n, o) + } + r.augmentClass(a, i), e.exports = a + }, function(e, t, n) { + "use strict"; + var r, o = n(26), + i = n(217), + a = /^[ \r\n\t\f]/, + u = /<(!--|link|noscript|meta|script|style)[ \r\n\t\f\/>]/, + s = n(218)(function(e, t) { + if (e.namespaceURI !== i.svg || "innerHTML" in e) e.innerHTML = t; + else { + (r = r || document.createElement("div")).innerHTML = "" + t + ""; + for (var n = r.firstChild; n.firstChild;) e.appendChild(n.firstChild) + } + }); + if (o.canUseDOM) { + var l = document.createElement("div"); + l.innerHTML = " ", "" === l.innerHTML && (s = function(e, t) { + if (e.parentNode && e.parentNode.replaceChild(e, e), a.test(t) || "<" === t[0] && u.test(t)) { + e.innerHTML = String.fromCharCode(65279) + t; + var n = e.firstChild; + 1 === n.data.length ? e.removeChild(n) : n.deleteData(0, 1) + } else e.innerHTML = t + }), l = null + } + e.exports = s + }, function(e, t, n) { + "use strict"; + var r = /["'&<>]/; + e.exports = function(e) { + return "boolean" == typeof e || "number" == typeof e ? "" + e : function(e) { + var t, n = "" + e, + o = r.exec(n); + if (!o) return n; + var i = "", + a = 0, + u = 0; + for (a = o.index; a < n.length; a++) { + switch (n.charCodeAt(a)) { + case 34: + t = """; + break; + case 38: + t = "&"; + break; + case 39: + t = "'"; + break; + case 60: + t = "<"; + break; + case 62: + t = ">"; + break; + default: + continue + } + u !== a && (i += n.substring(u, a)), u = a + 1, i += t + } + return u !== a ? 
i + n.substring(u, a) : i + }(e) + } + }, function(e, t, n) { + "use strict"; + var r, o = n(13), + i = n(210), + a = n(804), + u = n(358), + s = n(805), + l = n(214), + c = {}, + f = !1, + p = 0, + d = { + topAbort: "abort", + topAnimationEnd: s("animationend") || "animationend", + topAnimationIteration: s("animationiteration") || "animationiteration", + topAnimationStart: s("animationstart") || "animationstart", + topBlur: "blur", + topCanPlay: "canplay", + topCanPlayThrough: "canplaythrough", + topChange: "change", + topClick: "click", + topCompositionEnd: "compositionend", + topCompositionStart: "compositionstart", + topCompositionUpdate: "compositionupdate", + topContextMenu: "contextmenu", + topCopy: "copy", + topCut: "cut", + topDoubleClick: "dblclick", + topDrag: "drag", + topDragEnd: "dragend", + topDragEnter: "dragenter", + topDragExit: "dragexit", + topDragLeave: "dragleave", + topDragOver: "dragover", + topDragStart: "dragstart", + topDrop: "drop", + topDurationChange: "durationchange", + topEmptied: "emptied", + topEncrypted: "encrypted", + topEnded: "ended", + topError: "error", + topFocus: "focus", + topInput: "input", + topKeyDown: "keydown", + topKeyPress: "keypress", + topKeyUp: "keyup", + topLoadedData: "loadeddata", + topLoadedMetadata: "loadedmetadata", + topLoadStart: "loadstart", + topMouseDown: "mousedown", + topMouseMove: "mousemove", + topMouseOut: "mouseout", + topMouseOver: "mouseover", + topMouseUp: "mouseup", + topPaste: "paste", + topPause: "pause", + topPlay: "play", + topPlaying: "playing", + topProgress: "progress", + topRateChange: "ratechange", + topScroll: "scroll", + topSeeked: "seeked", + topSeeking: "seeking", + topSelectionChange: "selectionchange", + topStalled: "stalled", + topSuspend: "suspend", + topTextInput: "textInput", + topTimeUpdate: "timeupdate", + topTouchCancel: "touchcancel", + topTouchEnd: "touchend", + topTouchMove: "touchmove", + topTouchStart: "touchstart", + topTransitionEnd: s("transitionend") || "transitionend", + topVolumeChange: "volumechange", + topWaiting: "waiting", + topWheel: "wheel" + }, + h = "_reactListenersID" + String(Math.random()).slice(2); + var v = o({}, a, { + ReactEventListener: null, + injection: { + injectReactEventListener: function(e) { + e.setHandleTopLevel(v.handleTopLevel), v.ReactEventListener = e + } + }, + setEnabled: function(e) { + v.ReactEventListener && v.ReactEventListener.setEnabled(e) + }, + isEnabled: function() { + return !(!v.ReactEventListener || !v.ReactEventListener.isEnabled()) + }, + listenTo: function(e, t) { + for (var n = t, r = function(e) { + return Object.prototype.hasOwnProperty.call(e, h) || (e[h] = p++, c[e[h]] = {}), c[e[h]] + }(n), o = i.registrationNameDependencies[e], a = 0; a < o.length; a++) { + var u = o[a]; + r.hasOwnProperty(u) && r[u] || ("topWheel" === u ? l("wheel") ? v.ReactEventListener.trapBubbledEvent("topWheel", "wheel", n) : l("mousewheel") ? v.ReactEventListener.trapBubbledEvent("topWheel", "mousewheel", n) : v.ReactEventListener.trapBubbledEvent("topWheel", "DOMMouseScroll", n) : "topScroll" === u ? l("scroll", !0) ? v.ReactEventListener.trapCapturedEvent("topScroll", "scroll", n) : v.ReactEventListener.trapBubbledEvent("topScroll", "scroll", v.ReactEventListener.WINDOW_HANDLE) : "topFocus" === u || "topBlur" === u ? (l("focus", !0) ? 
(v.ReactEventListener.trapCapturedEvent("topFocus", "focus", n), v.ReactEventListener.trapCapturedEvent("topBlur", "blur", n)) : l("focusin") && (v.ReactEventListener.trapBubbledEvent("topFocus", "focusin", n), v.ReactEventListener.trapBubbledEvent("topBlur", "focusout", n)), r.topBlur = !0, r.topFocus = !0) : d.hasOwnProperty(u) && v.ReactEventListener.trapBubbledEvent(u, d[u], n), r[u] = !0) + } + }, + trapBubbledEvent: function(e, t, n) { + return v.ReactEventListener.trapBubbledEvent(e, t, n) + }, + trapCapturedEvent: function(e, t, n) { + return v.ReactEventListener.trapCapturedEvent(e, t, n) + }, + supportsEventPageXY: function() { + if (!document.createEvent) return !1; + var e = document.createEvent("MouseEvent"); + return null != e && "pageX" in e + }, + ensureScrollValueMonitoring: function() { + if (void 0 === r && (r = v.supportsEventPageXY()), !r && !f) { + var e = u.refreshScrollValues; + v.ReactEventListener.monitorScrollValue(e), f = !0 + } + } + }); + e.exports = v + }, function(e, t, n) { + "use strict"; + + function r() { + this.__rules__ = [], this.__cache__ = null + } + r.prototype.__find__ = function(e) { + for (var t = this.__rules__.length, n = -1; t--;) + if (this.__rules__[++n].name === e) return n; + return -1 + }, r.prototype.__compile__ = function() { + var e = this, + t = [""]; + e.__rules__.forEach(function(e) { + e.enabled && e.alt.forEach(function(e) { + t.indexOf(e) < 0 && t.push(e) + }) + }), e.__cache__ = {}, t.forEach(function(t) { + e.__cache__[t] = [], e.__rules__.forEach(function(n) { + n.enabled && (t && n.alt.indexOf(t) < 0 || e.__cache__[t].push(n.fn)) + }) + }) + }, r.prototype.at = function(e, t, n) { + var r = this.__find__(e), + o = n || {}; + if (-1 === r) throw new Error("Parser rule not found: " + e); + this.__rules__[r].fn = t, this.__rules__[r].alt = o.alt || [], this.__cache__ = null + }, r.prototype.before = function(e, t, n, r) { + var o = this.__find__(e), + i = r || {}; + if (-1 === o) throw new Error("Parser rule not found: " + e); + this.__rules__.splice(o, 0, { + name: t, + enabled: !0, + fn: n, + alt: i.alt || [] + }), this.__cache__ = null + }, r.prototype.after = function(e, t, n, r) { + var o = this.__find__(e), + i = r || {}; + if (-1 === o) throw new Error("Parser rule not found: " + e); + this.__rules__.splice(o + 1, 0, { + name: t, + enabled: !0, + fn: n, + alt: i.alt || [] + }), this.__cache__ = null + }, r.prototype.push = function(e, t, n) { + var r = n || {}; + this.__rules__.push({ + name: e, + enabled: !0, + fn: t, + alt: r.alt || [] + }), this.__cache__ = null + }, r.prototype.enable = function(e, t) { + e = Array.isArray(e) ? e : [e], t && this.__rules__.forEach(function(e) { + e.enabled = !1 + }), e.forEach(function(e) { + var t = this.__find__(e); + if (t < 0) throw new Error("Rules manager: invalid rule name " + e); + this.__rules__[t].enabled = !0 + }, this), this.__cache__ = null + }, r.prototype.disable = function(e) { + (e = Array.isArray(e) ? 
e : [e]).forEach(function(e) { + var t = this.__find__(e); + if (t < 0) throw new Error("Rules manager: invalid rule name " + e); + this.__rules__[t].enabled = !1 + }, this), this.__cache__ = null + }, r.prototype.getRules = function(e) { + return null === this.__cache__ && this.__compile__(), this.__cache__[e] || [] + }, e.exports = r + }, function(e, t, n) { + "use strict"; + e.exports = function(e, t) { + var n, r, o, i = -1, + a = e.posMax, + u = e.pos, + s = e.isInLabel; + if (e.isInLabel) return -1; + if (e.labelUnmatchedScopes) return e.labelUnmatchedScopes--, -1; + for (e.pos = t + 1, e.isInLabel = !0, n = 1; e.pos < a;) { + if (91 === (o = e.src.charCodeAt(e.pos))) n++; + else if (93 === o && 0 === --n) { + r = !0; + break + } + e.parser.skipToken(e) + } + return r ? (i = e.pos, e.labelUnmatchedScopes = 0) : e.labelUnmatchedScopes = n - 1, e.pos = u, e.isInLabel = s, i + } + }, function(e, t, n) { + var r = n(93); + e.exports = Object("z").propertyIsEnumerable(0) ? Object : function(e) { + return "String" == r(e) ? e.split("") : Object(e) + } + }, function(e, t) { + e.exports = function(e) { + if (void 0 == e) throw TypeError("Can't call method on " + e); + return e + } + }, function(e, t, n) { + var r = n(28), + o = n(21).document, + i = r(o) && r(o.createElement); + e.exports = function(e) { + return i ? o.createElement(e) : {} + } + }, function(e, t, n) { + var r = n(28); + e.exports = function(e, t) { + if (!r(e)) return e; + var n, o; + if (t && "function" == typeof(n = e.toString) && !r(o = n.call(e))) return o; + if ("function" == typeof(n = e.valueOf) && !r(o = n.call(e))) return o; + if (!t && "function" == typeof(n = e.toString) && !r(o = n.call(e))) return o; + throw TypeError("Can't convert object to primitive value") + } + }, function(e, t, n) { + e.exports = n(50) + }, function(e, t, n) { + var r = n(36), + o = n(452), + i = n(163), + a = n(161)("IE_PROTO"), + u = function() {}, + s = function() { + var e, t = n(156)("iframe"), + r = i.length; + for (t.style.display = "none", n(240).appendChild(t), t.src = "javascript:", (e = t.contentWindow.document).open(), e.write("